diff --git a/src/python/diff3_analysis.ipynb b/src/python/diff3_analysis.ipynb index 558935f300..409ad5d373 100644 --- a/src/python/diff3_analysis.ipynb +++ b/src/python/diff3_analysis.ipynb @@ -2,28 +2,105 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "fasterxml/jackson-core : Cloning repo\n", + "fasterxml/jackson-core : Finished cloning\n", + "fasterxml/jackson-core : Finished cloning\n", + "Checking out left294f761da4b185c997d6f19d0753711b702fe702\n", + "Checking out right2cd126e3f8cbf8fee29f7a21f6c0c5c34fa63344\n", + "Found base shaf554808f2285ab7fa68a7b830e33417712ce4a26\n", + "\n", + "fasterxml/jackson-core : Cloning repo\n", + "fasterxml/jackson-core : Finished cloning\n", + "fasterxml/jackson-core : Finished cloning\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Switched to branch 'TEMP_LEFT_BRANCH'\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running: git merge --no-edit -s ort TEMP_RIGHT_BRANCH\n", + "Auto-merging pom.xml\n", + "CONFLICT (content): Merge conflict in pom.xml\n", + "Auto-merging release-notes/VERSION\n", + "CONFLICT (content): Merge conflict in release-notes/VERSION\n", + "Automatic merge failed; fix conflicts and then commit the result.\n", + "Conflict\n", + "\n", + "fasterxml/jackson-core : Cloning repo\n", + "fasterxml/jackson-core : Finished cloning\n", + "fasterxml/jackson-core : Finished cloning\n", + "Diff results saved to ./merge_conflict_analysis_diffs/809/gitmerge_ort/diff_pom.xml.txt\n", + "Diff results saved to ./merge_conflict_analysis_diffs/809/gitmerge_ort/diff_VERSION.txt\n", + "fasterxml/jackson-core : Cloning repo\n", + "fasterxml/jackson-core : Finished cloning\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[1], line 31\u001b[0m\n\u001b[1;32m 29\u001b[0m repo_output_dir \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(base_output_dir, \u001b[38;5;28mstr\u001b[39m(row_num), merge_tool)\n\u001b[1;32m 30\u001b[0m os\u001b[38;5;241m.\u001b[39mmakedirs(repo_output_dir, exist_ok\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[0;32m---> 31\u001b[0m \u001b[43mdiff3_analysis\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmerge_tool\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrow_num\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrepo_output_dir\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/GitHub/AST-Merging-Ben-Analysis/src/python/diff3_analysis.py:38\u001b[0m, in \u001b[0;36mdiff3_analysis\u001b[0;34m(merge_tool, results_index, repo_output_dir)\u001b[0m\n\u001b[1;32m 35\u001b[0m repo_name \u001b[38;5;241m=\u001b[39m df\u001b[38;5;241m.\u001b[39miloc[results_index][\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrepository\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[1;32m 37\u001b[0m script \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m../scripts/merge_tools/\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m+\u001b[39m merge_tool \u001b[38;5;241m+\u001b[39m 
\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.sh\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m---> 38\u001b[0m repo \u001b[38;5;241m=\u001b[39m \u001b[43mclone_repo_to_path\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 39\u001b[0m \u001b[43m \u001b[49m\u001b[43mrepo_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m./repos/merge_attempt\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\n\u001b[1;32m 40\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Return a Git-Python repo object\u001b[39;00m\n\u001b[1;32m 41\u001b[0m repo\u001b[38;5;241m.\u001b[39mremote()\u001b[38;5;241m.\u001b[39mfetch()\n\u001b[1;32m 42\u001b[0m left_sha \u001b[38;5;241m=\u001b[39m df\u001b[38;5;241m.\u001b[39miloc[results_index][\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mleft\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n", + "File \u001b[0;32m~/Documents/GitHub/AST-Merging-Ben-Analysis/src/python/repo.py:77\u001b[0m, in \u001b[0;36mclone_repo_to_path\u001b[0;34m(repo_slug, path)\u001b[0m\n\u001b[1;32m 75\u001b[0m \u001b[38;5;28mprint\u001b[39m(repo_slug, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m : Finished cloning\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 76\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m---> 77\u001b[0m repo \u001b[38;5;241m=\u001b[39m \u001b[43mgit\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrepo\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mRepo\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mclone_from\u001b[49m\u001b[43m(\u001b[49m\u001b[43mgithub_url\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrepo_dir\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 78\u001b[0m \u001b[38;5;28mprint\u001b[39m(repo_slug, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m : Finished cloning\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 79\u001b[0m repo\u001b[38;5;241m.\u001b[39mremote()\u001b[38;5;241m.\u001b[39mfetch()\n", + "File \u001b[0;32m~/miniconda/envs/research/lib/python3.8/site-packages/git/repo/base.py:1328\u001b[0m, in \u001b[0;36mRepo.clone_from\u001b[0;34m(cls, url, to_path, progress, env, multi_options, allow_unsafe_protocols, allow_unsafe_options, **kwargs)\u001b[0m\n\u001b[1;32m 1326\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m env \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 1327\u001b[0m git\u001b[38;5;241m.\u001b[39mupdate_environment(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39menv)\n\u001b[0;32m-> 1328\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mcls\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_clone\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1329\u001b[0m \u001b[43m \u001b[49m\u001b[43mgit\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1330\u001b[0m \u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1331\u001b[0m \u001b[43m \u001b[49m\u001b[43mto_path\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1332\u001b[0m \u001b[43m \u001b[49m\u001b[43mGitCmdObjectDB\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1333\u001b[0m \u001b[43m \u001b[49m\u001b[43mprogress\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1334\u001b[0m \u001b[43m \u001b[49m\u001b[43mmulti_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1335\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unsafe_protocols\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mallow_unsafe_protocols\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1336\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mallow_unsafe_options\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mallow_unsafe_options\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1337\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1338\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/miniconda/envs/research/lib/python3.8/site-packages/git/repo/base.py:1232\u001b[0m, in \u001b[0;36mRepo._clone\u001b[0;34m(cls, git, url, path, odb_default_type, progress, multi_options, allow_unsafe_protocols, allow_unsafe_options, **kwargs)\u001b[0m\n\u001b[1;32m 1224\u001b[0m handle_process_output(\n\u001b[1;32m 1225\u001b[0m proc,\n\u001b[1;32m 1226\u001b[0m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1229\u001b[0m decode_streams\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m,\n\u001b[1;32m 1230\u001b[0m )\n\u001b[1;32m 1231\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1232\u001b[0m (stdout, stderr) \u001b[38;5;241m=\u001b[39m \u001b[43mproc\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcommunicate\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1233\u001b[0m cmdline \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mgetattr\u001b[39m(proc, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124margs\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 1234\u001b[0m cmdline \u001b[38;5;241m=\u001b[39m remove_password_if_present(cmdline)\n", + "File \u001b[0;32m~/miniconda/envs/research/lib/python3.8/subprocess.py:1028\u001b[0m, in \u001b[0;36mPopen.communicate\u001b[0;34m(self, input, timeout)\u001b[0m\n\u001b[1;32m 1025\u001b[0m endtime \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1027\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1028\u001b[0m stdout, stderr \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_communicate\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mendtime\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1029\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mKeyboardInterrupt\u001b[39;00m:\n\u001b[1;32m 1030\u001b[0m \u001b[38;5;66;03m# https://bugs.python.org/issue25942\u001b[39;00m\n\u001b[1;32m 1031\u001b[0m \u001b[38;5;66;03m# See the detailed comment in .wait().\u001b[39;00m\n\u001b[1;32m 1032\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m timeout \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", + "File \u001b[0;32m~/miniconda/envs/research/lib/python3.8/subprocess.py:1884\u001b[0m, in \u001b[0;36mPopen._communicate\u001b[0;34m(self, input, endtime, orig_timeout)\u001b[0m\n\u001b[1;32m 1877\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_timeout(endtime, orig_timeout,\n\u001b[1;32m 1878\u001b[0m stdout, stderr,\n\u001b[1;32m 1879\u001b[0m skip_check_and_raise\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m 1880\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m( \u001b[38;5;66;03m# Impossible :)\u001b[39;00m\n\u001b[1;32m 1881\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m_check_timeout(..., skip_check_and_raise=True) 
\u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 1882\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mfailed to raise TimeoutExpired.\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m-> 1884\u001b[0m ready \u001b[38;5;241m=\u001b[39m \u001b[43mselector\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mselect\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1885\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_timeout(endtime, orig_timeout, stdout, stderr)\n\u001b[1;32m 1887\u001b[0m \u001b[38;5;66;03m# XXX Rewrite these to use non-blocking I/O on the file\u001b[39;00m\n\u001b[1;32m 1888\u001b[0m \u001b[38;5;66;03m# objects; they are no longer using C stdio!\u001b[39;00m\n", + "File \u001b[0;32m~/miniconda/envs/research/lib/python3.8/selectors.py:415\u001b[0m, in \u001b[0;36m_PollLikeSelector.select\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m 413\u001b[0m ready \u001b[38;5;241m=\u001b[39m []\n\u001b[1;32m 414\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 415\u001b[0m fd_event_list \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_selector\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpoll\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 416\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mInterruptedError\u001b[39;00m:\n\u001b[1;32m 417\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ready\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ - "row_num = 1444\n", - "merge_tool = \"gitmerge_ort\"\n", - "# merge_tool = \"gitmerge_ort_adjacent\"\n", - "# merge_tool = \"gitmerge_ort_ignorespace\"\n", - "# merge_tool = \"gitmerge_ort_imports\"\n", - "# merge_tool = \"gitmerge_ort_imports_ignorespace\"\n", - "# merge_tool = \"gitmerge_resolve\"\n", - "# merge_tool = \"gitmerge_recursive_histogram\"\n", - "# merge_tool = \"gitmerge_recursive_ignorespace\"\n", - "# merge_tool = \"gitmerge_recursive_minimal\"\t\n", - "# merge_tool = \"gitmerge_recursive_myers\"\n", - "# merge_tool = \"gitmerge_recursive_patience\"\n", - "# merge_tool = \"git_hires_merge\"\n", - "# merge_tool = \"spork\"\n", - "# merge_tool = \"intellimerge\"\n", - "\n", "from diff3_analysis import diff3_analysis\n", - "diff3_analysis(merge_tool, row_num)" + "import os\n", + "\n", + "row_nums = [\n", + " 582, 427, 930, 70, 128, 1444, 1177, 849, 1425, 1642, 1897, 862, 943, 1442, 1120,\n", + " 111, 693, 535, 354, 530, 845, 654, 921, 464, 1006, 707, 485, 1928, 809, 1329, 65, 1890, 100, 247, 2038, 900\n", + "]\n", + "\n", + "merge_tools = [\"gitmerge_ort\", \n", + " \"gitmerge_ort_adjacent\", \n", + " \"gitmerge_ort_ignorespace\", \n", + " \"gitmerge_ort_imports\", \n", + " \"gitmerge_ort_imports_ignorespace\", \n", + " \"gitmerge_resolve\",\n", + " \"gitmerge_recursive_histogram\", \n", + " \"gitmerge_recursive_ignorespace\", \n", + " \"gitmerge_recursive_minimal\", \n", + " \"gitmerge_recursive_myers\", \n", + " \"gitmerge_recursive_patience\",\n", + " \"git_hires_merge\",\n", + " \"spork\", \n", + " \"intellimerge\"]\n", + "\n", + "# Ensure the base output directory exists\n", + "base_output_dir = \"./merge_conflict_analysis_diffs\"\n", + "\n", + "for row_num in row_nums:\n", + " for merge_tool in merge_tools:\n", + " # Create a subdirectory for this specific results_index\n", + " repo_output_dir = os.path.join(base_output_dir, str(row_num), merge_tool)\n", + " os.makedirs(repo_output_dir, 
exist_ok=True)\n", + " diff3_analysis(merge_tool, row_num, repo_output_dir)\n" ] } ], diff --git a/src/python/diff3_analysis.py b/src/python/diff3_analysis.py index b1aeba9123..8acf862db7 100644 --- a/src/python/diff3_analysis.py +++ b/src/python/diff3_analysis.py @@ -13,14 +13,15 @@ # pylint: disable-msg=too-many-locals -def diff3_analysis(merge_tool: str, results_index: int): +def diff3_analysis(merge_tool: str, results_index: int, repo_output_dir): """ Analyzes merge conflicts using the diff3 tool and opens the results in the default text viewer. Args: merge_tool (str): The merge tool to be used. results_index (int): The index of the repository in the results DataFrame. - + repo_output_dir (path): The path of where we want to store the results from the analysis + Returns: None """ @@ -29,12 +30,6 @@ def diff3_analysis(merge_tool: str, results_index: int): # We do this to prevent errors if cloning the same repo into the folder twice shutil.rmtree("./repos", ignore_errors=True) - # Ensure the base output directory exists - base_output_dir = "./merge_conflict_analysis_diffs" - # Create a subdirectory for this specific results_index - repo_output_dir = os.path.join(base_output_dir, f"index_{results_index}") - os.makedirs(repo_output_dir, exist_ok=True) - # Retrieve left and right branch from hash in repo df = pd.read_csv("../../results_greatest_hits/result.csv") repo_name = df.iloc[results_index]["repository"] diff --git a/src/python/merge_conflict_analysis_diffs/1006/git_hires_merge/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/git_hires_merge/diff_Gson.java.txt new file mode 100644 index 0000000000..979214efc7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/git_hires_merge/diff_Gson.java.txt @@ -0,0 +1,1124 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====3 +1:53a +2:34a +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:42,61c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import 
java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====3 +1:78c +2:79c + * String json = gson.toJson(target); // serializes target to Json +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====3 +1:82,86c +2:83,87c + *
If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +3:85,91c + *
If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====3 +1:88,90c +2:89,91c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====3 +1:93,94c +2:94,95c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====3 +1:97c +2:98c + *
See the Gson User Guide +3:106c + *
See the Gson User Guide +====3 +1:100c +2:101c + * @see com.google.gson.reflect.TypeToken +3:109,136c + *
Lenient JSON handling
+ * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *
Serialization
+ *
    + *
  1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
  2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
    + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
  3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
+ * + *
Deserialization
+ *
    + *
  1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
  2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
    + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
  3. Call {@link TypeAdapter#read(JsonReader)} + *
  4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
+ * + * @see TypeToken +====3 +1:120c +2:121c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +3:155a +====1 +1:130,131c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); +2:131,132c +3:165,166c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); +==== +1:133c + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:134c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +3:168c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====3 +1:158a +2:159a +3:194c + final List reflectionFilters; +====3 +1:185c +2:186c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====3 +1:187c +2:188c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====3 +1:202c +2:203c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:214c +2:215c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====3 +1:218c +2:219c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====3 +1:234a +2:235a +3:273c + this.reflectionFilters = reflectionFilters; +====3 +1:236c +2:237c + List factories = new ArrayList(); +3:275c + List factories = new ArrayList<>(); +====3 +1:299c +2:300c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====3 +1:308a +2:309a +3:348c + * @since 2.8.3 +====3 +1:371c +2:372c + out.value(value); +3:411c + out.value(doubleValue); +====3 +1:395c +2:396c + out.value(value); +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====3 +1:452c +2:453c + List list = new ArrayList(); +3:495c + List list = new ArrayList<>(); +====3 +1:475c +2:476c + @SuppressWarnings("unchecked") +3:517a +====3 +1:477c +2:478c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====3 +1:479c +2:480c + return (TypeAdapter) cached; +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:483,484c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +====1 +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:486c +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:488c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:492c + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:497,498c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:500c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +==== +1:504c + typeTokenCache.put(type, candidate); +2:507,519c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:525c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:528,545c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark 
adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====3 +1:607c +2:638c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:611,612c +2:642,643c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====3 +1:613a +2:644a +3:696,697c + * + * @see #toJsonTree(Object, Type) +====3 +1:636a +2:667a +3:721,722c + * + * @see #toJsonTree(Object) +====3 +1:645c +2:676c + * This method serializes the specified object into its equivalent Json representation. +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====3 +1:649c +2:680c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:654c +2:685c + * @param src the object for which Json representation is to be created setting for Gson +3:740c + * @param src the object for which JSON representation is to be created +====3 +1:655a +2:686a +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====3 +1:666c +2:697c + * equivalent Json representation. This method must be used if the specified object is a generic +3:755c + * equivalent JSON representation. This method must be used if the specified object is a generic +====3 +1:677c +2:708c + * @return Json representation of {@code src} +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====3 +1:686c +2:717c + * This method serializes the specified object into its equivalent Json representation. +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====3 +1:690c +2:721c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:783c + * of Java. 
Note that this method works fine if any of the object fields are of generic type, +====3 +1:694,695c +2:725,726c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====3 +1:697a +2:728a +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====3 +1:709,710c +2:740,741c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====3 +1:719c +2:750c + * @param writer Writer to which the Json representation of src needs to be written. +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====3 +1:721a +2:752a +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====3 +1:734a +2:765a +3:835,843c + * + *
    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *
    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:737c +2:768c + @SuppressWarnings("unchecked") +3:845a +====3 +1:739c +2:770c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====3 +1:747c +2:778c + ((TypeAdapter) adapter).write(writer, src); +3:856c + adapter.write(writer, src); +====3 +1:778c +2:809c + * @param writer Writer to which the Json representation needs to be written +3:887c + * @param writer Writer to which the JSON representation needs to be written +====3 +1:832a +2:863a +3:942,950c + * + *
    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *
    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:858c +2:889c + * This method deserializes the specified Json into an object of the specified class. It is not +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====3 +1:864c +2:895c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====3 +1:866a +2:897a +3:985,987c + *
    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:873a +2:904a +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====3 +1:876c +2:907c + Object object = fromJson(json, (Type) classOfT); +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:881c +2:912c + * This method deserializes the specified Json into an object of the specified type. This method +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====3 +1:883c +2:914c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====3 +1:886,889c +2:917,920c + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +3:1010,1047c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *
    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *
    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====3 +1:892c +2:923c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +3:1050c + * new TypeToken<Collection<Foo>>(){} +====3 +1:896,897c +2:927,928c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====3 +1:899,900c +2:930,931c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:905,906c +2:936,937c + T target = (T) fromJson(reader, typeOfT); + return target; +3:1065c + return fromJson(reader, typeOfT); +====3 +1:910c +2:941c + * This method deserializes the Json read from the specified reader into an object of the +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:914c +2:945c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:916c +2:947c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====3 +1:918a +2:949a +3:1078,1080c + *
    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:920c +2:951c + * @param json the reader producing the Json from which the object is to be deserialized. +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====3 +1:922c +2:953c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:924c +2:955c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:925a +2:956a +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:928,930c +2:959,961c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:935c +2:966c + * This method deserializes the Json read from the specified reader into an object of the +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:937c +2:968c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====3 +1:939a +2:970a +3:1103,1110c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *
    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:941,948c +2:972,979c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:950c +2:981c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:951a +2:982a +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:954a +2:985a +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *
    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:956c +2:987c + T object = (T) fromJson(jsonReader, typeOfT); +3:1155c + T object = fromJson(jsonReader, typeOfT); +====3 +1:964c +2:995c + throw new JsonIOException("JSON document was not fully consumed."); +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====3 +1:972a +2:1003a +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====3 +1:974c +2:1005c + * Reads the next JSON value from {@code reader} and convert it to an object +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====3 +1:976c +2:1007c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +3:1177a +====3 +1:978,979c +2:1009,1010c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1179,1200c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *
    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *
    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====3 +1:982a +2:1013a +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *
    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *
    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:989,990c +2:1020,1021c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====3 +1:1017c +2:1048c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1021c +2:1052c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:1023c +2:1054c + * invoke {@link #fromJson(JsonElement, Type)}. +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====3 +1:1028c +2:1059c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1030c +2:1061c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====3 +1:1031a +2:1062a +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1034c +2:1065c + Object object = fromJson(json, (Type) classOfT); +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:1039c +2:1070c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1042a +2:1073a +3:1301,1305c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====3 +1:1046,1052c +2:1077,1083c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1055a +2:1086a +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1058a +2:1089a +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:1062c +2:1093c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1096,1098c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1107,1115c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1117,1120c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1122,1126c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1130c + getResolvedDelegate().write(out, value); +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/git_hires_merge/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/git_hires_merge/diff_GsonTest.java.txt new file mode 100644 index 0000000000..76854475ca --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/git_hires_merge/diff_GsonTest.java.txt @@ -0,0 +1,681 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====3 +1:29a +2:31a +3:32c + import java.util.Collections; +==== +1:30a +2:33,34c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====3 +1:59c +2:63c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:65,66c + 
CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:73c +2:77c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:88a +2:92a +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:159,374c + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort/diff_Gson.java.txt new file mode 100644 index 0000000000..588a5039ad --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort/diff_Gson.java.txt @@ -0,0 +1,1218 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====1 +1:53a +2:35c +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:43,70c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + <<<<<<< HEAD + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + ||||||| 47dea2ee + ======= + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====1 +1:78c + * String json = gson.toJson(target); // serializes target to Json +2:88c +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====1 +1:82,86c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +2:92,98c +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====1 +1:88,90c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +2:100,102c +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====1 +1:93,94c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +2:105,110c +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====1 +1:97c + *

    See the Gson User Guide +2:113c +3:106c + *

    See the Gson User Guide +====1 +1:100c + * @see com.google.gson.reflect.TypeToken +2:116,143c +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====1 +1:120c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +2:162a +3:155a +==== +1:130,133c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:172,189c + <<<<<<< HEAD + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + ||||||| 47dea2ee + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + ======= + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal<>(); + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ||||||| 47dea2ee + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ======= + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:165,168c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====1 +1:158a +2:215c +3:194c + final List reflectionFilters; +====1 +1:185c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +2:242c +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====1 +1:187c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +2:244c +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====1 +1:202c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +2:259,260c +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:214c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +2:272,273c +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====1 +1:218c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +2:277c +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====1 +1:234a +2:294c +3:273c + this.reflectionFilters = reflectionFilters; +====1 +1:236c + List factories = new ArrayList(); +2:296c +3:275c + List factories = new ArrayList<>(); +====1 +1:299c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +2:359c +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====1 +1:308a +2:369c +3:348c + * @since 2.8.3 +====1 +1:371c + out.value(value); +2:432c +3:411c + out.value(doubleValue); +====1 +1:395c + out.value(value); +2:456,459c +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====1 +1:452c + List list = new ArrayList(); +2:516c +3:495c + List list = new ArrayList<>(); +====1 +1:475c + @SuppressWarnings("unchecked") +2:538a +3:517a +====1 +1:477c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +2:540,541c +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====1 +1:479c + return (TypeAdapter) cached; +2:543,545c +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:548,549c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +==== +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:551,557c + <<<<<<< HEAD + threadCalls = new LinkedHashMap<>(); + ||||||| 47dea2ee + threadCalls = new HashMap, FutureTypeAdapter>(); + ======= + threadCalls = new HashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:559c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:563,570c + <<<<<<< HEAD + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); + ||||||| 47dea2ee + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + ======= + @SuppressWarnings("unchecked") + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + >>>>>>> TEMP_RIGHT_BRANCH +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:575,576c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:578c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +====2 +1:502a +3:550a +2:584,590c + @SuppressWarnings("unchecked") + TypeAdapter existingAdapter = (TypeAdapter) typeTokenCache.putIfAbsent(type, candidate); + // If other thread concurrently added adapter prefer that one instead + if (existingAdapter != null) { + candidate = existingAdapter; + } + +==== +1:504c + typeTokenCache.put(type, candidate); +2:592,609c + <<<<<<< HEAD + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; + ||||||| 47dea2ee + typeTokenCache.put(type, candidate); + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) 
typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:615c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:618,635c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====1 +1:607c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:728c +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:611,612c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +2:732,733c +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====1 +1:613a +2:735,736c +3:696,697c + * + * @see #toJsonTree(Object, Type) +====1 +1:636a +2:760,761c +3:721,722c + * + * @see #toJsonTree(Object) +====1 +1:645c + * This method serializes the specified object into its equivalent Json representation. +2:770c +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====1 +1:649c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:774c +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:654c + * @param src the object for which Json representation is to be created setting for Gson +2:779c +3:740c + * @param src the object for which JSON representation is to be created +====1 +1:655a +2:781,783c +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====1 +1:666c + * equivalent Json representation. This method must be used if the specified object is a generic +2:794c +3:755c + * equivalent JSON representation. 
This method must be used if the specified object is a generic +====1 +1:677c + * @return Json representation of {@code src} +2:805,808c +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====1 +1:686c + * This method serializes the specified object into its equivalent Json representation. +2:817,818c +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====1 +1:690c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:822c +3:783c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:694,695c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +2:826,827c +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====1 +1:697a +2:830,832c +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====1 +1:709,710c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +2:844,846c +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====1 +1:719c + * @param writer Writer to which the Json representation of src needs to be written. +2:855c +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====1 +1:721a +2:858,860c +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====1 +1:734a +2:874,882c +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:737c + @SuppressWarnings("unchecked") +2:884a +3:845a +====1 +1:739c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +2:886,887c +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====1 +1:747c + ((TypeAdapter) adapter).write(writer, src); +2:895c +3:856c + adapter.write(writer, src); +====1 +1:778c + * @param writer Writer to which the Json representation needs to be written +2:926c +3:887c + * @param writer Writer to which the JSON representation needs to be written +====1 +1:832a +2:981,989c +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:858c + * This method deserializes the specified Json into an object of the specified class. It is not +2:1015c +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====1 +1:864c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +2:1021c +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====1 +1:866a +2:1024,1026c +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:873a +2:1034,1036c +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====1 +1:876c + Object object = fromJson(json, (Type) classOfT); +2:1039c +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:881c + * This method deserializes the specified Json into an object of the specified type. This method +2:1044c +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====1 +1:883c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +2:1046c +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====1 +1:885a +2:1049,1082c +3:1010,1043c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * +====1 +1:888,889c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +2:1085,1086c +3:1046,1047c + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====1 +1:892c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +2:1089c +3:1050c + * new TypeToken<Collection<Foo>>(){} +====1 +1:896,897c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1093,1097c +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====1 +1:899,900c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +2:1099c +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:905,906c + T target = (T) fromJson(reader, typeOfT); + return target; +2:1104c +3:1065c + return fromJson(reader, typeOfT); +====1 +1:910c + * This method deserializes the Json read from the specified reader into an object of the +2:1108c +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:914c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1112c +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:916c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +2:1114c +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====1 +1:918a +2:1117,1119c +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:920c + * @param json the reader producing the Json from which the object is to be deserialized. +2:1121c +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====1 +1:922c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +2:1123c +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:924c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1125c +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:925a +2:1127,1129c +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:928,930c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +2:1132c +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:935c + * This method deserializes the Json read from the specified reader into an object of the +2:1137c +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:937c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +2:1139c +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====1 +1:939a +2:1142,1149c +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:941,948c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +2:1151,1153c +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:950c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1155c +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:951a +2:1157,1160c +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:954a +2:1164,1192c +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:956c + T object = (T) fromJson(jsonReader, typeOfT); +2:1194c +3:1155c + T object = fromJson(jsonReader, typeOfT); +====1 +1:964c + throw new JsonIOException("JSON document was not fully consumed."); +2:1202c +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====1 +1:972a +2:1211,1213c +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====1 +1:974c + * Reads the next JSON value from {@code reader} and convert it to an object +2:1215c +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====1 +1:976c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +2:1216a +3:1177a +====1 +1:978,979c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1218,1239c +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====1 +1:982a +2:1243,1276c +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:989,990c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +2:1283c +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====1 +1:1017c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1310c +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1021c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1314c +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:1023c + * invoke {@link #fromJson(JsonElement, Type)}. +2:1316,1317c +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====1 +1:1028c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1322c +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1030c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +2:1324c +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====1 +1:1031a +2:1326,1328c +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1034c + Object object = fromJson(json, (Type) classOfT); +2:1331c +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:1039c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1336c +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1042a +2:1340,1344c +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====1 +1:1046,1052c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1348,1349c +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1055a +2:1353,1356c +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1058a +2:1360,1385c +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
+     * </pre>
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:1062c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +2:1389c +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1392,1402c + <<<<<<< HEAD + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; + ||||||| 47dea2ee + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; + ======= + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate; + >>>>>>> TEMP_RIGHT_BRANCH +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1411,1430c + <<<<<<< HEAD + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + ||||||| 47dea2ee + @Override public T read(JsonReader in) throws IOException { + if (delegate == null) { + throw new IllegalStateException(); + ======= + private TypeAdapter delegate() { + if (delegate == null) { + throw new IllegalStateException("Delegate has not been set yet"); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1432,1435c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1437,1454c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); + ||||||| 47dea2ee + return delegate.read(in); + ======= + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); + >>>>>>> TEMP_RIGHT_BRANCH +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1458,1467c + <<<<<<< HEAD + getResolvedDelegate().write(out, value); + ||||||| 47dea2ee + if (delegate == null) { + throw new IllegalStateException(); + } + 
delegate.write(out, value); + ======= + delegate().write(out, value); + >>>>>>> TEMP_RIGHT_BRANCH +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort/diff_GsonTest.java.txt new file mode 100644 index 0000000000..ababbc30f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort/diff_GsonTest.java.txt @@ -0,0 +1,897 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====1 +1:29a +2:32c +3:32c + import java.util.Collections; +==== +1:30a +2:34,40c + <<<<<<< HEAD + import java.util.concurrent.CountDownLatch; + ||||||| 47dea2ee + ======= + import java.util.concurrent.atomic.AtomicInteger; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====1 +1:59c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:69,70c +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:73c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:84,85c +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +==== +1:88a +2:101,163c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for test"); + } + } + + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch 
(NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:230,593c + + /** + <<<<<<< HEAD + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + ||||||| 47dea2ee + ======= + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + >>>>>>> TEMP_RIGHT_BRANCH + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_adjacent/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_adjacent/diff_Gson.java.txt new file mode 100644 index 0000000000..979214efc7 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_adjacent/diff_Gson.java.txt @@ -0,0 +1,1124 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====3 +1:53a +2:34a +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:42,61c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====3 +1:78c +2:79c + * String json = gson.toJson(target); // serializes target to Json +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====3 +1:82,86c +2:83,87c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====3 +1:88,90c +2:89,91c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====3 +1:93,94c +2:94,95c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====3 +1:97c +2:98c + *

    See the Gson User Guide +3:106c + *

    See the Gson User Guide +====3 +1:100c +2:101c + * @see com.google.gson.reflect.TypeToken +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====3 +1:120c +2:121c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +3:155a +====1 +1:130,131c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); +2:131,132c +3:165,166c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); +==== +1:133c + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:134c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +3:168c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====3 +1:158a +2:159a +3:194c + final List reflectionFilters; +====3 +1:185c +2:186c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====3 +1:187c +2:188c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====3 +1:202c +2:203c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:214c +2:215c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====3 +1:218c +2:219c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====3 +1:234a +2:235a +3:273c + this.reflectionFilters = reflectionFilters; +====3 +1:236c +2:237c + List factories = new ArrayList(); +3:275c + List factories = new ArrayList<>(); +====3 +1:299c +2:300c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====3 +1:308a +2:309a +3:348c + * @since 2.8.3 +====3 +1:371c +2:372c + out.value(value); +3:411c + out.value(doubleValue); +====3 +1:395c +2:396c + out.value(value); +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====3 +1:452c +2:453c + List list = new ArrayList(); +3:495c + List list = new ArrayList<>(); +====3 +1:475c +2:476c + @SuppressWarnings("unchecked") +3:517a +====3 +1:477c +2:478c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====3 +1:479c +2:480c + return (TypeAdapter) cached; +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:483,484c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +====1 +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:486c +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:488c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:492c + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:497,498c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:500c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +==== +1:504c + typeTokenCache.put(type, candidate); +2:507,519c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:525c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:528,545c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark 
adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====3 +1:607c +2:638c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:611,612c +2:642,643c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====3 +1:613a +2:644a +3:696,697c + * + * @see #toJsonTree(Object, Type) +====3 +1:636a +2:667a +3:721,722c + * + * @see #toJsonTree(Object) +====3 +1:645c +2:676c + * This method serializes the specified object into its equivalent Json representation. +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====3 +1:649c +2:680c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:654c +2:685c + * @param src the object for which Json representation is to be created setting for Gson +3:740c + * @param src the object for which JSON representation is to be created +====3 +1:655a +2:686a +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====3 +1:666c +2:697c + * equivalent Json representation. This method must be used if the specified object is a generic +3:755c + * equivalent JSON representation. This method must be used if the specified object is a generic +====3 +1:677c +2:708c + * @return Json representation of {@code src} +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====3 +1:686c +2:717c + * This method serializes the specified object into its equivalent Json representation. +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====3 +1:690c +2:721c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:783c + * of Java. 
Note that this method works fine if any of the object fields are of generic type, +====3 +1:694,695c +2:725,726c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====3 +1:697a +2:728a +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====3 +1:709,710c +2:740,741c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====3 +1:719c +2:750c + * @param writer Writer to which the Json representation of src needs to be written. +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====3 +1:721a +2:752a +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====3 +1:734a +2:765a +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:737c +2:768c + @SuppressWarnings("unchecked") +3:845a +====3 +1:739c +2:770c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====3 +1:747c +2:778c + ((TypeAdapter) adapter).write(writer, src); +3:856c + adapter.write(writer, src); +====3 +1:778c +2:809c + * @param writer Writer to which the Json representation needs to be written +3:887c + * @param writer Writer to which the JSON representation needs to be written +====3 +1:832a +2:863a +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:858c +2:889c + * This method deserializes the specified Json into an object of the specified class. It is not +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====3 +1:864c +2:895c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====3 +1:866a +2:897a +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:873a +2:904a +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====3 +1:876c +2:907c + Object object = fromJson(json, (Type) classOfT); +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:881c +2:912c + * This method deserializes the specified Json into an object of the specified type. This method +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====3 +1:883c +2:914c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====3 +1:886,889c +2:917,920c + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +3:1010,1047c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====3 +1:892c +2:923c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +3:1050c + * new TypeToken<Collection<Foo>>(){} +====3 +1:896,897c +2:927,928c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====3 +1:899,900c +2:930,931c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:905,906c +2:936,937c + T target = (T) fromJson(reader, typeOfT); + return target; +3:1065c + return fromJson(reader, typeOfT); +====3 +1:910c +2:941c + * This method deserializes the Json read from the specified reader into an object of the +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:914c +2:945c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:916c +2:947c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====3 +1:918a +2:949a +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:920c +2:951c + * @param json the reader producing the Json from which the object is to be deserialized. +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====3 +1:922c +2:953c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:924c +2:955c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:925a +2:956a +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:928,930c +2:959,961c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:935c +2:966c + * This method deserializes the Json read from the specified reader into an object of the +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:937c +2:968c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====3 +1:939a +2:970a +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:941,948c +2:972,979c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

+     * <pre>
+     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
+     * </pre>
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:950c +2:981c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:951a +2:982a +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:954a +2:985a +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *
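For the Reader-based overloads covered by these hunks, a small usage sketch. It assumes Gson 2.10+ for fromJson(Reader, TypeToken); on older versions the TypeToken's getType() would be passed to fromJson(Reader, Type) instead. Any Reader works in practice (a file or network stream); a StringReader keeps the sketch self-contained.

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.io.Reader;
import java.io.StringReader;
import java.util.List;
import java.util.Map;

public class ReaderFromJson {
  public static void main(String[] args) throws Exception {
    Gson gson = new Gson();
    try (Reader reader = new StringReader("[{\"a\":1},{\"b\":2}]")) {
      TypeToken<List<Map<String, Integer>>> listType =
          new TypeToken<List<Map<String, Integer>>>() {};
      // TypeToken overload added in 2.10; older versions: fromJson(reader, listType.getType())
      List<Map<String, Integer>> rows = gson.fromJson(reader, listType);
      System.out.println(rows); // [{a=1}, {b=2}]
    }
  }
}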

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

+     * <pre>
+     * new TypeToken<Collection<Foo>>(){}
+     * </pre>
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:956c +2:987c + T object = (T) fromJson(jsonReader, typeOfT); +3:1155c + T object = fromJson(jsonReader, typeOfT); +====3 +1:964c +2:995c + throw new JsonIOException("JSON document was not fully consumed."); +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====3 +1:972a +2:1003a +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====3 +1:974c +2:1005c + * Reads the next JSON value from {@code reader} and convert it to an object +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====3 +1:976c +2:1007c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +3:1177a +====3 +1:978,979c +2:1009,1010c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====3 +1:982a +2:1013a +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *
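The Javadoc above states that the JsonReader overloads read exactly one value per call, tolerate multiple top-level values, and force lenient parsing for the duration of the call. A sketch of reading a stream of top-level values; the Point class is invented for the example, and the reader is switched to lenient explicitly because the peek() between calls happens outside fromJson, where the reader's own strictness applies again.

import com.google.gson.Gson;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import java.io.StringReader;

public class MultipleTopLevelValues {
  static class Point {
    int x;
  }

  public static void main(String[] args) throws Exception {
    Gson gson = new Gson();
    // Two top-level values: fromJson(String, ...) would reject the second one as
    // trailing data, but fromJson(JsonReader, ...) consumes one value per call.
    JsonReader reader = new JsonReader(new StringReader("{\"x\":1} {\"x\":2}"));
    reader.setLenient(true); // needed for our own peek() between the two documents
    while (reader.peek() != JsonToken.END_DOCUMENT) {
      Point p = gson.fromJson(reader, Point.class); // binds to fromJson(JsonReader, Type)
      System.out.println(p.x);
    }
    reader.close();
  }
}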

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

+     * <pre>
+     * new TypeToken<Collection<Foo>>(){}
+     * </pre>
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:989,990c +2:1020,1021c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====3 +1:1017c +2:1048c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1021c +2:1052c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:1023c +2:1054c + * invoke {@link #fromJson(JsonElement, Type)}. +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====3 +1:1028c +2:1059c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1030c +2:1061c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====3 +1:1031a +2:1062a +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1034c +2:1065c + Object object = fromJson(json, (Type) classOfT); +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:1039c +2:1070c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1042a +2:1073a +3:1301,1305c + *
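The parse-tree overloads (fromJson(JsonElement, ...)) referenced in the following hunks map an already-parsed tree onto an object. A small sketch; the User class and the JSON payload are invented for illustration:

import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class TreeModelDemo {
  static class User {
    String name;
    int age;
  }

  public static void main(String[] args) {
    Gson gson = new Gson();
    JsonObject root = JsonParser.parseString(
        "{\"meta\":{\"page\":1},\"user\":{\"name\":\"Ada\",\"age\":36}}").getAsJsonObject();

    // Deserialize just one subtree of the parse tree.
    JsonElement userElement = root.get("user");
    User user = gson.fromJson(userElement, User.class);
    System.out.println(user.name + " " + user.age);
  }
}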

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====3 +1:1046,1052c +2:1077,1083c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

+     * <pre>
+     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
+     * </pre>
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1055a +2:1086a +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1058a +2:1089a +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
+     * <pre>
+     * new TypeToken<Collection<Foo>>(){}
+     * </pre>
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:1062c +2:1093c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1096,1098c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1107,1115c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1117,1120c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1122,1126c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1130c + getResolvedDelegate().write(out, value); +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_adjacent/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_adjacent/diff_GsonTest.java.txt new file mode 100644 index 0000000000..76854475ca --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_adjacent/diff_GsonTest.java.txt @@ -0,0 +1,681 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====3 +1:29a +2:31a +3:32c + import java.util.Collections; +==== +1:30a +2:33,34c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====3 +1:59c +2:63c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:65,66c 
+ CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:73c +2:77c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:88a +2:92a +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:159,374c + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
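The test above exercises Gson's placeholder mechanism (FutureTypeAdapter): a TypeAdapterFactory may call gson.getAdapter(...) for a type whose adapter is still being constructed, and the unresolved placeholder must not leak. A sketch of that pattern from the factory's point of view; Node, the adapter logic, and the expected output are invented for illustration, and only the write path is implemented.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.TypeAdapter;
import com.google.gson.TypeAdapterFactory;
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import java.io.IOException;

public class CyclicFactoryDemo {
  static class Node {            // hypothetical self-referential type
    int value;
    Node next;
  }

  public static void main(String[] args) {
    Gson gson = new GsonBuilder()
        .registerTypeAdapterFactory(new TypeAdapterFactory() {
          @Override public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
            if (type.getRawType() != Node.class) {
              return null; // let Gson handle all other types
            }
            // Requesting the adapter for Node while Node's adapter is being created:
            // Gson hands back an unresolved placeholder here and resolves it once
            // this factory returns.
            TypeAdapter<Node> self = gson.getAdapter(Node.class);
            @SuppressWarnings("unchecked")
            TypeAdapter<T> result = (TypeAdapter<T>) new TypeAdapter<Node>() {
              @Override public void write(JsonWriter out, Node node) throws IOException {
                if (node == null) { out.nullValue(); return; }
                out.beginObject();
                out.name("value").value(node.value);
                out.name("next");
                self.write(out, node.next);   // delegate through the placeholder
                out.endObject();
              }
              @Override public Node read(JsonReader in) throws IOException {
                throw new UnsupportedOperationException("not needed for this sketch");
              }
            };
            return result;
          }
        })
        .create();

    Node head = new Node();
    head.value = 1;
    head.next = new Node();
    head.next.value = 2;
    // prints {"value":1,"next":{"value":2}} (null fields are skipped by default)
    System.out.println(gson.toJson(head, Node.class));
  }
}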
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
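These tests pin down that Gson.newBuilder() returns an independent copy of the configuration. A minimal sketch of the behavior being asserted; the Money class and the serializer are invented for the example.

import com.google.gson.Gson;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializer;

public class NewBuilderIsolation {
  // Hypothetical value class used only for this sketch.
  static class Money {
    final long cents;
    Money(long cents) { this.cents = cents; }
  }

  public static void main(String[] args) {
    Gson original = new Gson();

    // newBuilder() copies the configuration; registering an adapter on the copy
    // must not change the behavior of `original`.
    Gson customized = original.newBuilder()
        .registerTypeAdapter(Money.class,
            (JsonSerializer<Money>) (src, type, ctx) -> new JsonPrimitive(src.cents + " cents"))
        .create();

    System.out.println(original.toJson(new Money(250)));   // {"cents":250}
    System.out.println(customized.toJson(new Money(250))); // "250 cents"
  }
}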
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_ignorespace/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_ignorespace/diff_Gson.java.txt new file mode 100644 index 0000000000..357e7423f8 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_ignorespace/diff_Gson.java.txt @@ -0,0 +1,1218 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====1 +1:53a +2:35c +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:43,70c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + <<<<<<< HEAD + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + ||||||| 47dea2ee + ======= + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====1 +1:78c + * String json = gson.toJson(target); // serializes target to Json +2:88c +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====1 +1:82,86c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +2:92,98c +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====1 +1:88,90c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +2:100,102c +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====1 +1:93,94c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +2:105,110c +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====1 +1:97c + *

    See the Gson User Guide +2:113c +3:106c + *

    See the Gson User Guide +====1 +1:100c + * @see com.google.gson.reflect.TypeToken +2:116,143c +3:109,136c + *
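The rewritten class Javadoc above explains that an explicit type is optional for serialization (Gson can fall back to the runtime type) but required, via Type or TypeToken, when deserializing parameterized types. A short sketch of that asymmetry; the payload is arbitrary.

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.util.Arrays;
import java.util.List;

public class ParameterizedRoundTrip {
  public static void main(String[] args) {
    Gson gson = new Gson();
    List<Integer> numbers = Arrays.asList(1, 2, 3);

    // Serialization: the runtime type is usually sufficient.
    String json = gson.toJson(numbers); // [1,2,3]

    // Deserialization: without a parameterized type Gson cannot know the element
    // type, so a TypeToken (or its Type) has to be supplied.
    TypeToken<List<Integer>> listType = new TypeToken<List<Integer>>() {};
    List<Integer> back = gson.fromJson(json, listType.getType());

    System.out.println(back.equals(numbers)); // true
  }
}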

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====1 +1:120c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +2:162a +3:155a +==== +1:130,133c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:172,189c + <<<<<<< HEAD + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + ||||||| 47dea2ee + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + ======= + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal<>(); + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ||||||| 47dea2ee + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ======= + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:165,168c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====1 +1:158a +2:215c +3:194c + final List reflectionFilters; +====1 +1:185c + *
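The new class Javadoc above lists workarounds for Gson's historically lenient parsing. A sketch that follows the numbered Deserialization steps, using only API the Javadoc itself references (getAdapter, newJsonReader, JsonReader.peek()); the Config class is invented for the example.

import com.google.gson.Gson;
import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import java.io.StringReader;

public class StrictFromJson {
  // Hypothetical target type for this sketch.
  static class Config {
    String name;
  }

  public static void main(String[] args) throws Exception {
    Gson gson = new Gson();

    // 1. Obtain the adapter for the target type.
    TypeAdapter<Config> adapter = gson.getAdapter(Config.class);

    // 2. Create a JsonReader carrying this Gson instance's reader settings.
    JsonReader reader = gson.newJsonReader(new StringReader("{\"name\":\"a\"}"));

    // 3. Read one value through the adapter (no forced leniency on this path).
    Config config = adapter.read(reader);

    // 4. Verify there is no trailing data. With the strict reader created above,
    //    trailing content would already make peek() throw MalformedJsonException;
    //    the explicit check also covers readers configured as lenient.
    if (reader.peek() != JsonToken.END_DOCUMENT) {
      throw new IllegalStateException("Trailing data after the JSON document");
    }
    System.out.println(config.name);
  }
}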
  • The default field naming policy for the output Json is same as in Java. So, a Java class +2:242c +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====1 +1:187c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +2:244c +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====1 +1:202c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +2:259,260c +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:214c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +2:272,273c +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====1 +1:218c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +2:277c +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====1 +1:234a +2:294c +3:273c + this.reflectionFilters = reflectionFilters; +====1 +1:236c + List factories = new ArrayList(); +2:296c +3:275c + List factories = new ArrayList<>(); +====1 +1:299c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +2:359c +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====1 +1:308a +2:369c +3:348c + * @since 2.8.3 +====1 +1:371c + out.value(value); +2:432c +3:411c + out.value(doubleValue); +====1 +1:395c + out.value(value); +2:456,459c +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====1 +1:452c + List list = new ArrayList(); +2:516c +3:495c + List list = new ArrayList<>(); +====1 +1:475c + @SuppressWarnings("unchecked") +2:538a +3:517a +====1 +1:477c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +2:540,541c +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====1 +1:479c + return (TypeAdapter) cached; +2:543,545c +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:548,549c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +==== +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:551,557c + <<<<<<< HEAD + threadCalls = new LinkedHashMap<>(); + ||||||| 47dea2ee + threadCalls = new HashMap, FutureTypeAdapter>(); + ======= + threadCalls = new HashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:559c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:563,570c + <<<<<<< HEAD + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); + ||||||| 47dea2ee + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + ======= + @SuppressWarnings("unchecked") + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + >>>>>>> TEMP_RIGHT_BRANCH +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:575,576c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:578c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +====2 +1:502a +3:550a +2:584,590c + @SuppressWarnings("unchecked") + TypeAdapter existingAdapter = (TypeAdapter) typeTokenCache.putIfAbsent(type, candidate); + // If other thread concurrently added adapter prefer that one instead + if (existingAdapter != null) { + candidate = existingAdapter; + } + +==== +1:504c + typeTokenCache.put(type, candidate); +2:592,609c + <<<<<<< HEAD + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; + ||||||| 47dea2ee + typeTokenCache.put(type, candidate); + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) 
typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:615c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:618,635c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====1 +1:607c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:728c +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:611,612c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +2:732,733c +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====1 +1:613a +2:735,736c +3:696,697c + * + * @see #toJsonTree(Object, Type) +====1 +1:636a +2:760,761c +3:721,722c + * + * @see #toJsonTree(Object) +====1 +1:645c + * This method serializes the specified object into its equivalent Json representation. +2:770c +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====1 +1:649c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:774c +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:654c + * @param src the object for which Json representation is to be created setting for Gson +2:779c +3:740c + * @param src the object for which JSON representation is to be created +====1 +1:655a +2:781,783c +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====1 +1:666c + * equivalent Json representation. This method must be used if the specified object is a generic +2:794c +3:755c + * equivalent JSON representation. 
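The conflict hunks above concern getAdapter's bookkeeping: a per-thread map of placeholder adapters for types still being resolved, publication of resolved adapters into the shared cache only when the outermost request completes, and discarding of placeholders when a factory fails. A much-simplified, illustrative sketch of that general pattern, not Gson's actual code; all names here are invented and cleanup of broken placeholders on failure is omitted for brevity.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Supplier;

public class CyclicMemoizer<K, V> {

  /** Placeholder handed out while the real value is still being computed. */
  public static final class Ref<V> implements Supplier<V> {
    private V value;
    @Override public V get() {
      if (value == null) {
        throw new IllegalStateException("requested before its cyclic dependency was resolved");
      }
      return value;
    }
  }

  public interface Factory<K, V> {
    /** May call {@code memoizer.get(...)} again for dependencies, even cyclically. */
    V create(K key, CyclicMemoizer<K, V> memoizer);
  }

  private final ConcurrentMap<K, Supplier<V>> cache = new ConcurrentHashMap<>();
  // Insertion order matters so partially built entries could be identified on failure.
  private final ThreadLocal<LinkedHashMap<K, Ref<V>>> inProgress = new ThreadLocal<>();
  private final Factory<K, V> factory;

  public CyclicMemoizer(Factory<K, V> factory) {
    this.factory = factory;
  }

  public Supplier<V> get(K key) {
    Supplier<V> cached = cache.get(key);
    if (cached != null) {
      return cached;
    }
    LinkedHashMap<K, Ref<V>> calls = inProgress.get();
    boolean outermost = false;
    if (calls == null) {
      calls = new LinkedHashMap<>();
      inProgress.set(calls);
      outermost = true;
    }
    Ref<V> ongoing = calls.get(key);
    if (ongoing != null) {
      return ongoing; // cyclic request: hand out the unresolved placeholder
    }
    Ref<V> ref = new Ref<>();
    calls.put(key, ref); // register the placeholder before computing the value
    try {
      ref.value = factory.create(key, this);
      if (outermost) {
        // Publish to other threads only once the whole dependency chain is resolved.
        for (Map.Entry<K, Ref<V>> e : calls.entrySet()) {
          cache.putIfAbsent(e.getKey(), e.getValue());
        }
      }
      return ref;
    } finally {
      if (outermost) {
        inProgress.remove();
      }
    }
  }
}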
This method must be used if the specified object is a generic +====1 +1:677c + * @return Json representation of {@code src} +2:805,808c +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====1 +1:686c + * This method serializes the specified object into its equivalent Json representation. +2:817,818c +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====1 +1:690c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:822c +3:783c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:694,695c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +2:826,827c +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====1 +1:697a +2:830,832c +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====1 +1:709,710c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +2:844,846c +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====1 +1:719c + * @param writer Writer to which the Json representation of src needs to be written. +2:855c +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====1 +1:721a +2:858,860c +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====1 +1:734a +2:874,882c +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:737c + @SuppressWarnings("unchecked") +2:884a +3:845a +====1 +1:739c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +2:886,887c +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====1 +1:747c + ((TypeAdapter) adapter).write(writer, src); +2:895c +3:856c + adapter.write(writer, src); +====1 +1:778c + * @param writer Writer to which the Json representation needs to be written +2:926c +3:887c + * @param writer Writer to which the JSON representation needs to be written +====1 +1:832a +2:981,989c +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:858c + * This method deserializes the specified Json into an object of the specified class. It is not +2:1015c +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====1 +1:864c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +2:1021c +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====1 +1:866a +2:1024,1026c +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:873a +2:1034,1036c +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====1 +1:876c + Object object = fromJson(json, (Type) classOfT); +2:1039c +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:881c + * This method deserializes the specified Json into an object of the specified type. This method +2:1044c +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====1 +1:883c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +2:1046c +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====1 +1:885a +2:1049,1082c +3:1010,1043c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * +====1 +1:888,889c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +2:1085,1086c +3:1046,1047c + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====1 +1:892c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +2:1089c +3:1050c + * new TypeToken<Collection<Foo>>(){} +====1 +1:896,897c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1093,1097c +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====1 +1:899,900c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +2:1099c +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:905,906c + T target = (T) fromJson(reader, typeOfT); + return target; +2:1104c +3:1065c + return fromJson(reader, typeOfT); +====1 +1:910c + * This method deserializes the Json read from the specified reader into an object of the +2:1108c +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:914c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1112c +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:916c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +2:1114c +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====1 +1:918a +2:1117,1119c +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:920c + * @param json the reader producing the Json from which the object is to be deserialized. +2:1121c +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====1 +1:922c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +2:1123c +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:924c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1125c +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:925a +2:1127,1129c +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:928,930c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +2:1132c +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:935c + * This method deserializes the Json read from the specified reader into an object of the +2:1137c +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:937c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +2:1139c +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====1 +1:939a +2:1142,1149c +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:941,948c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +2:1151,1153c +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:950c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1155c +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:951a +2:1157,1160c +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:954a +2:1164,1192c +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:956c + T object = (T) fromJson(jsonReader, typeOfT); +2:1194c +3:1155c + T object = fromJson(jsonReader, typeOfT); +====1 +1:964c + throw new JsonIOException("JSON document was not fully consumed."); +2:1202c +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====1 +1:972a +2:1211,1213c +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====1 +1:974c + * Reads the next JSON value from {@code reader} and convert it to an object +2:1215c +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====1 +1:976c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +2:1216a +3:1177a +====1 +1:978,979c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1218,1239c +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====1 +1:982a +2:1243,1276c +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:989,990c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +2:1283c +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====1 +1:1017c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1310c +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1021c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1314c +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:1023c + * invoke {@link #fromJson(JsonElement, Type)}. +2:1316,1317c +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====1 +1:1028c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1322c +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1030c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +2:1324c +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====1 +1:1031a +2:1326,1328c +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1034c + Object object = fromJson(json, (Type) classOfT); +2:1331c +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:1039c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1336c +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1042a +2:1340,1344c +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====1 +1:1046,1052c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1348,1349c +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1055a +2:1353,1356c +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1058a +2:1360,1385c +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:1062c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +2:1389c +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1392,1402c + <<<<<<< HEAD + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; + ||||||| 47dea2ee + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; + ======= + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate; + >>>>>>> TEMP_RIGHT_BRANCH +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1411,1430c + <<<<<<< HEAD + public void markBroken() { + isBroken = true; + ||||||| 47dea2ee + @Override public T read(JsonReader in) throws IOException { + if (delegate == null) { + throw new IllegalStateException(); + ======= + private TypeAdapter delegate() { + if (delegate == null) { + throw new IllegalStateException("Delegate has not been set yet"); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1432,1435c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1437,1454c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); + ||||||| 47dea2ee + return delegate.read(in); + ======= + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); + >>>>>>> TEMP_RIGHT_BRANCH +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1458,1467c + <<<<<<< HEAD + getResolvedDelegate().write(out, value); + ||||||| 47dea2ee + if (delegate == null) { + throw new IllegalStateException(); + } + 
delegate.write(out, value); + ======= + delegate().write(out, value); + >>>>>>> TEMP_RIGHT_BRANCH +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_ignorespace/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_ignorespace/diff_GsonTest.java.txt new file mode 100644 index 0000000000..ababbc30f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_ignorespace/diff_GsonTest.java.txt @@ -0,0 +1,897 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====1 +1:29a +2:32c +3:32c + import java.util.Collections; +==== +1:30a +2:34,40c + <<<<<<< HEAD + import java.util.concurrent.CountDownLatch; + ||||||| 47dea2ee + ======= + import java.util.concurrent.atomic.AtomicInteger; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====1 +1:59c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:69,70c +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:73c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:84,85c +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +==== +1:88a +2:101,163c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for test"); + } + } + + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); 
+ fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:230,593c + + /** + <<<<<<< HEAD + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + ||||||| 47dea2ee + ======= + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + >>>>>>> TEMP_RIGHT_BRANCH + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports/diff_Gson.java.txt new file mode 100644 index 0000000000..979214efc7 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports/diff_Gson.java.txt @@ -0,0 +1,1124 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====3 +1:53a +2:34a +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:42,61c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====3 +1:78c +2:79c + * String json = gson.toJson(target); // serializes target to Json +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====3 +1:82,86c +2:83,87c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====3 +1:88,90c +2:89,91c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====3 +1:93,94c +2:94,95c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====3 +1:97c +2:98c + *

    See the Gson User Guide +3:106c + *

    See the Gson User Guide +====3 +1:100c +2:101c + * @see com.google.gson.reflect.TypeToken +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====3 +1:120c +2:121c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +3:155a +====1 +1:130,131c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); +2:131,132c +3:165,166c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); +==== +1:133c + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:134c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +3:168c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====3 +1:158a +2:159a +3:194c + final List reflectionFilters; +====3 +1:185c +2:186c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====3 +1:187c +2:188c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====3 +1:202c +2:203c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:214c +2:215c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====3 +1:218c +2:219c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====3 +1:234a +2:235a +3:273c + this.reflectionFilters = reflectionFilters; +====3 +1:236c +2:237c + List factories = new ArrayList(); +3:275c + List factories = new ArrayList<>(); +====3 +1:299c +2:300c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====3 +1:308a +2:309a +3:348c + * @since 2.8.3 +====3 +1:371c +2:372c + out.value(value); +3:411c + out.value(doubleValue); +====3 +1:395c +2:396c + out.value(value); +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====3 +1:452c +2:453c + List list = new ArrayList(); +3:495c + List list = new ArrayList<>(); +====3 +1:475c +2:476c + @SuppressWarnings("unchecked") +3:517a +====3 +1:477c +2:478c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====3 +1:479c +2:480c + return (TypeAdapter) cached; +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:483,484c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +====1 +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:486c +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:488c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:492c + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:497,498c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:500c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +==== +1:504c + typeTokenCache.put(type, candidate); +2:507,519c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:525c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:528,545c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark 
adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====3 +1:607c +2:638c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:611,612c +2:642,643c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====3 +1:613a +2:644a +3:696,697c + * + * @see #toJsonTree(Object, Type) +====3 +1:636a +2:667a +3:721,722c + * + * @see #toJsonTree(Object) +====3 +1:645c +2:676c + * This method serializes the specified object into its equivalent Json representation. +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====3 +1:649c +2:680c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:654c +2:685c + * @param src the object for which Json representation is to be created setting for Gson +3:740c + * @param src the object for which JSON representation is to be created +====3 +1:655a +2:686a +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====3 +1:666c +2:697c + * equivalent Json representation. This method must be used if the specified object is a generic +3:755c + * equivalent JSON representation. This method must be used if the specified object is a generic +====3 +1:677c +2:708c + * @return Json representation of {@code src} +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====3 +1:686c +2:717c + * This method serializes the specified object into its equivalent Json representation. +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====3 +1:690c +2:721c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:783c + * of Java. 
Note that this method works fine if any of the object fields are of generic type, +====3 +1:694,695c +2:725,726c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====3 +1:697a +2:728a +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====3 +1:709,710c +2:740,741c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====3 +1:719c +2:750c + * @param writer Writer to which the Json representation of src needs to be written. +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====3 +1:721a +2:752a +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====3 +1:734a +2:765a +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:737c +2:768c + @SuppressWarnings("unchecked") +3:845a +====3 +1:739c +2:770c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====3 +1:747c +2:778c + ((TypeAdapter) adapter).write(writer, src); +3:856c + adapter.write(writer, src); +====3 +1:778c +2:809c + * @param writer Writer to which the Json representation needs to be written +3:887c + * @param writer Writer to which the JSON representation needs to be written +====3 +1:832a +2:863a +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:858c +2:889c + * This method deserializes the specified Json into an object of the specified class. It is not +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====3 +1:864c +2:895c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====3 +1:866a +2:897a +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:873a +2:904a +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====3 +1:876c +2:907c + Object object = fromJson(json, (Type) classOfT); +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:881c +2:912c + * This method deserializes the specified Json into an object of the specified type. This method +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====3 +1:883c +2:914c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====3 +1:886,889c +2:917,920c + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +3:1010,1047c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====3 +1:892c +2:923c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +3:1050c + * new TypeToken<Collection<Foo>>(){} +====3 +1:896,897c +2:927,928c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====3 +1:899,900c +2:930,931c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:905,906c +2:936,937c + T target = (T) fromJson(reader, typeOfT); + return target; +3:1065c + return fromJson(reader, typeOfT); +====3 +1:910c +2:941c + * This method deserializes the Json read from the specified reader into an object of the +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:914c +2:945c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:916c +2:947c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====3 +1:918a +2:949a +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:920c +2:951c + * @param json the reader producing the Json from which the object is to be deserialized. +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====3 +1:922c +2:953c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:924c +2:955c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:925a +2:956a +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:928,930c +2:959,961c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:935c +2:966c + * This method deserializes the Json read from the specified reader into an object of the +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:937c +2:968c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====3 +1:939a +2:970a +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:941,948c +2:972,979c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:950c +2:981c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:951a +2:982a +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:954a +2:985a +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:956c +2:987c + T object = (T) fromJson(jsonReader, typeOfT); +3:1155c + T object = fromJson(jsonReader, typeOfT); +====3 +1:964c +2:995c + throw new JsonIOException("JSON document was not fully consumed."); +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====3 +1:972a +2:1003a +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====3 +1:974c +2:1005c + * Reads the next JSON value from {@code reader} and convert it to an object +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====3 +1:976c +2:1007c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +3:1177a +====3 +1:978,979c +2:1009,1010c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====3 +1:982a +2:1013a +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:989,990c +2:1020,1021c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====3 +1:1017c +2:1048c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1021c +2:1052c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:1023c +2:1054c + * invoke {@link #fromJson(JsonElement, Type)}. +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====3 +1:1028c +2:1059c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1030c +2:1061c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====3 +1:1031a +2:1062a +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1034c +2:1065c + Object object = fromJson(json, (Type) classOfT); +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:1039c +2:1070c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1042a +2:1073a +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====3 +1:1046,1052c +2:1077,1083c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1055a +2:1086a +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1058a +2:1089a +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:1062c +2:1093c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1096,1098c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1107,1115c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1117,1120c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1122,1126c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1130c + getResolvedDelegate().write(out, value); +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports/diff_GsonTest.java.txt new file mode 100644 index 0000000000..76854475ca --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports/diff_GsonTest.java.txt @@ -0,0 +1,681 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====3 +1:29a +2:31a +3:32c + import java.util.Collections; +==== +1:30a +2:33,34c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====3 +1:59c +2:63c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:65,66c + 
CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:73c +2:77c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:88a +2:92a +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:159,374c + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports_ignorespace/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports_ignorespace/diff_Gson.java.txt new file mode 100644 index 0000000000..979214efc7 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports_ignorespace/diff_Gson.java.txt @@ -0,0 +1,1124 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====3 +1:53a +2:34a +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:42,61c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====3 +1:78c +2:79c + * String json = gson.toJson(target); // serializes target to Json +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====3 +1:82,86c +2:83,87c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====3 +1:88,90c +2:89,91c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====3 +1:93,94c +2:94,95c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====3 +1:97c +2:98c + *

    See the Gson User Guide +3:106c + *

    See the Gson User Guide +====3 +1:100c +2:101c + * @see com.google.gson.reflect.TypeToken +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====3 +1:120c +2:121c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +3:155a +====1 +1:130,131c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); +2:131,132c +3:165,166c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); +==== +1:133c + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:134c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +3:168c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====3 +1:158a +2:159a +3:194c + final List reflectionFilters; +====3 +1:185c +2:186c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====3 +1:187c +2:188c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====3 +1:202c +2:203c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:214c +2:215c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====3 +1:218c +2:219c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====3 +1:234a +2:235a +3:273c + this.reflectionFilters = reflectionFilters; +====3 +1:236c +2:237c + List factories = new ArrayList(); +3:275c + List factories = new ArrayList<>(); +====3 +1:299c +2:300c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====3 +1:308a +2:309a +3:348c + * @since 2.8.3 +====3 +1:371c +2:372c + out.value(value); +3:411c + out.value(doubleValue); +====3 +1:395c +2:396c + out.value(value); +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====3 +1:452c +2:453c + List list = new ArrayList(); +3:495c + List list = new ArrayList<>(); +====3 +1:475c +2:476c + @SuppressWarnings("unchecked") +3:517a +====3 +1:477c +2:478c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====3 +1:479c +2:480c + return (TypeAdapter) cached; +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:483,484c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +====1 +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:486c +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:488c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:492c + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:497,498c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:500c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +==== +1:504c + typeTokenCache.put(type, candidate); +2:507,519c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:525c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:528,545c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark 
adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====3 +1:607c +2:638c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:611,612c +2:642,643c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====3 +1:613a +2:644a +3:696,697c + * + * @see #toJsonTree(Object, Type) +====3 +1:636a +2:667a +3:721,722c + * + * @see #toJsonTree(Object) +====3 +1:645c +2:676c + * This method serializes the specified object into its equivalent Json representation. +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====3 +1:649c +2:680c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:654c +2:685c + * @param src the object for which Json representation is to be created setting for Gson +3:740c + * @param src the object for which JSON representation is to be created +====3 +1:655a +2:686a +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====3 +1:666c +2:697c + * equivalent Json representation. This method must be used if the specified object is a generic +3:755c + * equivalent JSON representation. This method must be used if the specified object is a generic +====3 +1:677c +2:708c + * @return Json representation of {@code src} +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====3 +1:686c +2:717c + * This method serializes the specified object into its equivalent Json representation. +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====3 +1:690c +2:721c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:783c + * of Java. 
Note that this method works fine if any of the object fields are of generic type, +====3 +1:694,695c +2:725,726c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====3 +1:697a +2:728a +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====3 +1:709,710c +2:740,741c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====3 +1:719c +2:750c + * @param writer Writer to which the Json representation of src needs to be written. +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====3 +1:721a +2:752a +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====3 +1:734a +2:765a +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:737c +2:768c + @SuppressWarnings("unchecked") +3:845a +====3 +1:739c +2:770c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====3 +1:747c +2:778c + ((TypeAdapter) adapter).write(writer, src); +3:856c + adapter.write(writer, src); +====3 +1:778c +2:809c + * @param writer Writer to which the Json representation needs to be written +3:887c + * @param writer Writer to which the JSON representation needs to be written +====3 +1:832a +2:863a +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:858c +2:889c + * This method deserializes the specified Json into an object of the specified class. It is not +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====3 +1:864c +2:895c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====3 +1:866a +2:897a +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:873a +2:904a +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====3 +1:876c +2:907c + Object object = fromJson(json, (Type) classOfT); +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:881c +2:912c + * This method deserializes the specified Json into an object of the specified type. This method +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====3 +1:883c +2:914c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====3 +1:886,889c +2:917,920c + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +3:1010,1047c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====3 +1:892c +2:923c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +3:1050c + * new TypeToken<Collection<Foo>>(){} +====3 +1:896,897c +2:927,928c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====3 +1:899,900c +2:930,931c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:905,906c +2:936,937c + T target = (T) fromJson(reader, typeOfT); + return target; +3:1065c + return fromJson(reader, typeOfT); +====3 +1:910c +2:941c + * This method deserializes the Json read from the specified reader into an object of the +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:914c +2:945c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:916c +2:947c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====3 +1:918a +2:949a +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:920c +2:951c + * @param json the reader producing the Json from which the object is to be deserialized. +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====3 +1:922c +2:953c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:924c +2:955c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:925a +2:956a +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:928,930c +2:959,961c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:935c +2:966c + * This method deserializes the Json read from the specified reader into an object of the +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:937c +2:968c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====3 +1:939a +2:970a +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:941,948c +2:972,979c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:950c +2:981c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:951a +2:982a +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:954a +2:985a +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:956c +2:987c + T object = (T) fromJson(jsonReader, typeOfT); +3:1155c + T object = fromJson(jsonReader, typeOfT); +====3 +1:964c +2:995c + throw new JsonIOException("JSON document was not fully consumed."); +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====3 +1:972a +2:1003a +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====3 +1:974c +2:1005c + * Reads the next JSON value from {@code reader} and convert it to an object +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====3 +1:976c +2:1007c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +3:1177a +====3 +1:978,979c +2:1009,1010c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====3 +1:982a +2:1013a +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:989,990c +2:1020,1021c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====3 +1:1017c +2:1048c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1021c +2:1052c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:1023c +2:1054c + * invoke {@link #fromJson(JsonElement, Type)}. +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====3 +1:1028c +2:1059c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1030c +2:1061c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====3 +1:1031a +2:1062a +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1034c +2:1065c + Object object = fromJson(json, (Type) classOfT); +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:1039c +2:1070c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1042a +2:1073a +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====3 +1:1046,1052c +2:1077,1083c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1055a +2:1086a +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1058a +2:1089a +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:1062c +2:1093c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1096,1098c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1107,1115c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1117,1120c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1122,1126c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1130c + getResolvedDelegate().write(out, value); +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports_ignorespace/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports_ignorespace/diff_GsonTest.java.txt new file mode 100644 index 0000000000..76854475ca --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_ort_imports_ignorespace/diff_GsonTest.java.txt @@ -0,0 +1,681 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====3 +1:29a +2:31a +3:32c + import java.util.Collections; +==== +1:30a +2:33,34c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====3 +1:59c +2:63c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, 
CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:73c +2:77c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:88a +2:92a +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:159,374c + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_histogram/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_histogram/diff_Gson.java.txt new file mode 100644 index 0000000000..588a5039ad --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_histogram/diff_Gson.java.txt @@ -0,0 +1,1218 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====1 +1:53a +2:35c +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:43,70c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + <<<<<<< HEAD + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + ||||||| 47dea2ee + ======= + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====1 +1:78c + * String json = gson.toJson(target); // serializes target to Json +2:88c +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====1 +1:82,86c + *
    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +2:92,98c +3:85,91c + *
    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====1 +1:88,90c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +2:100,102c +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====1 +1:93,94c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +2:105,110c +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====1 +1:97c + *
    See the Gson User Guide +2:113c +3:106c + *
    See the Gson User Guide +====1 +1:100c + * @see com.google.gson.reflect.TypeToken +2:116,143c +3:109,136c + *
    Lenient JSON handling
    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *
    Serialization
    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *
    Deserialization
    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====1 +1:120c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +2:162a +3:155a +==== +1:130,133c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:172,189c + <<<<<<< HEAD + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + ||||||| 47dea2ee + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + ======= + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal<>(); + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ||||||| 47dea2ee + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ======= + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:165,168c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====1 +1:158a +2:215c +3:194c + final List reflectionFilters; +====1 +1:185c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +2:242c +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====1 +1:187c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +2:244c +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====1 +1:202c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +2:259,260c +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:214c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +2:272,273c +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====1 +1:218c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +2:277c +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====1 +1:234a +2:294c +3:273c + this.reflectionFilters = reflectionFilters; +====1 +1:236c + List factories = new ArrayList(); +2:296c +3:275c + List factories = new ArrayList<>(); +====1 +1:299c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +2:359c +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====1 +1:308a +2:369c +3:348c + * @since 2.8.3 +====1 +1:371c + out.value(value); +2:432c +3:411c + out.value(doubleValue); +====1 +1:395c + out.value(value); +2:456,459c +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====1 +1:452c + List list = new ArrayList(); +2:516c +3:495c + List list = new ArrayList<>(); +====1 +1:475c + @SuppressWarnings("unchecked") +2:538a +3:517a +====1 +1:477c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +2:540,541c +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====1 +1:479c + return (TypeAdapter) cached; +2:543,545c +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:548,549c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +==== +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:551,557c + <<<<<<< HEAD + threadCalls = new LinkedHashMap<>(); + ||||||| 47dea2ee + threadCalls = new HashMap, FutureTypeAdapter>(); + ======= + threadCalls = new HashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:559c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:563,570c + <<<<<<< HEAD + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); + ||||||| 47dea2ee + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + ======= + @SuppressWarnings("unchecked") + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + >>>>>>> TEMP_RIGHT_BRANCH +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:575,576c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:578c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +====2 +1:502a +3:550a +2:584,590c + @SuppressWarnings("unchecked") + TypeAdapter existingAdapter = (TypeAdapter) typeTokenCache.putIfAbsent(type, candidate); + // If other thread concurrently added adapter prefer that one instead + if (existingAdapter != null) { + candidate = existingAdapter; + } + +==== +1:504c + typeTokenCache.put(type, candidate); +2:592,609c + <<<<<<< HEAD + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; + ||||||| 47dea2ee + typeTokenCache.put(type, candidate); + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) 
typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:615c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:618,635c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====1 +1:607c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:728c +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:611,612c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +2:732,733c +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====1 +1:613a +2:735,736c +3:696,697c + * + * @see #toJsonTree(Object, Type) +====1 +1:636a +2:760,761c +3:721,722c + * + * @see #toJsonTree(Object) +====1 +1:645c + * This method serializes the specified object into its equivalent Json representation. +2:770c +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====1 +1:649c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:774c +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:654c + * @param src the object for which Json representation is to be created setting for Gson +2:779c +3:740c + * @param src the object for which JSON representation is to be created +====1 +1:655a +2:781,783c +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====1 +1:666c + * equivalent Json representation. This method must be used if the specified object is a generic +2:794c +3:755c + * equivalent JSON representation. 
This method must be used if the specified object is a generic +====1 +1:677c + * @return Json representation of {@code src} +2:805,808c +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====1 +1:686c + * This method serializes the specified object into its equivalent Json representation. +2:817,818c +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====1 +1:690c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:822c +3:783c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:694,695c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +2:826,827c +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====1 +1:697a +2:830,832c +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====1 +1:709,710c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +2:844,846c +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====1 +1:719c + * @param writer Writer to which the Json representation of src needs to be written. +2:855c +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====1 +1:721a +2:858,860c +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====1 +1:734a +2:874,882c +3:835,843c + * + *
    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *
    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:737c + @SuppressWarnings("unchecked") +2:884a +3:845a +====1 +1:739c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +2:886,887c +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====1 +1:747c + ((TypeAdapter) adapter).write(writer, src); +2:895c +3:856c + adapter.write(writer, src); +====1 +1:778c + * @param writer Writer to which the Json representation needs to be written +2:926c +3:887c + * @param writer Writer to which the JSON representation needs to be written +====1 +1:832a +2:981,989c +3:942,950c + * + *
    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *
    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:858c + * This method deserializes the specified Json into an object of the specified class. It is not +2:1015c +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====1 +1:864c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +2:1021c +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====1 +1:866a +2:1024,1026c +3:985,987c + *
    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:873a +2:1034,1036c +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====1 +1:876c + Object object = fromJson(json, (Type) classOfT); +2:1039c +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:881c + * This method deserializes the specified Json into an object of the specified type. This method +2:1044c +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====1 +1:883c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +2:1046c +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====1 +1:885a +2:1049,1082c +3:1010,1043c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *
    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *
    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * +====1 +1:888,889c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +2:1085,1086c +3:1046,1047c + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====1 +1:892c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +2:1089c +3:1050c + * new TypeToken<Collection<Foo>>(){} +====1 +1:896,897c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1093,1097c +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====1 +1:899,900c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +2:1099c +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:905,906c + T target = (T) fromJson(reader, typeOfT); + return target; +2:1104c +3:1065c + return fromJson(reader, typeOfT); +====1 +1:910c + * This method deserializes the Json read from the specified reader into an object of the +2:1108c +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:914c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1112c +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:916c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +2:1114c +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====1 +1:918a +2:1117,1119c +3:1078,1080c + *
    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:920c + * @param json the reader producing the Json from which the object is to be deserialized. +2:1121c +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====1 +1:922c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +2:1123c +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:924c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1125c +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:925a +2:1127,1129c +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:928,930c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +2:1132c +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:935c + * This method deserializes the Json read from the specified reader into an object of the +2:1137c +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:937c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +2:1139c +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====1 +1:939a +2:1142,1149c +3:1103,1110c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *
    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:941,948c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +2:1151,1153c +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:950c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1155c +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:951a +2:1157,1160c +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:954a +2:1164,1192c +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *
    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:956c + T object = (T) fromJson(jsonReader, typeOfT); +2:1194c +3:1155c + T object = fromJson(jsonReader, typeOfT); +====1 +1:964c + throw new JsonIOException("JSON document was not fully consumed."); +2:1202c +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====1 +1:972a +2:1211,1213c +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====1 +1:974c + * Reads the next JSON value from {@code reader} and convert it to an object +2:1215c +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====1 +1:976c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +2:1216a +3:1177a +====1 +1:978,979c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1218,1239c +3:1179,1200c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *
    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *
    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====1 +1:982a +2:1243,1276c +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *
    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *
    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:989,990c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +2:1283c +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====1 +1:1017c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1310c +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1021c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1314c +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:1023c + * invoke {@link #fromJson(JsonElement, Type)}. +2:1316,1317c +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====1 +1:1028c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1322c +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1030c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +2:1324c +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====1 +1:1031a +2:1326,1328c +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1034c + Object object = fromJson(json, (Type) classOfT); +2:1331c +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:1039c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1336c +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1042a +2:1340,1344c +3:1301,1305c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====1 +1:1046,1052c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1348,1349c +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1055a +2:1353,1356c +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1058a +2:1360,1385c +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:1062c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +2:1389c +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1392,1402c + <<<<<<< HEAD + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; + ||||||| 47dea2ee + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; + ======= + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate; + >>>>>>> TEMP_RIGHT_BRANCH +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1411,1430c + <<<<<<< HEAD + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + ||||||| 47dea2ee + @Override public T read(JsonReader in) throws IOException { + if (delegate == null) { + throw new IllegalStateException(); + ======= + private TypeAdapter delegate() { + if (delegate == null) { + throw new IllegalStateException("Delegate has not been set yet"); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1432,1435c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1437,1454c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); + ||||||| 47dea2ee + return delegate.read(in); + ======= + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); + >>>>>>> TEMP_RIGHT_BRANCH +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1458,1467c + <<<<<<< HEAD + getResolvedDelegate().write(out, value); + ||||||| 47dea2ee + if (delegate == null) { + throw new IllegalStateException(); + } + 
delegate.write(out, value); + ======= + delegate().write(out, value); + >>>>>>> TEMP_RIGHT_BRANCH +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_histogram/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_histogram/diff_GsonTest.java.txt new file mode 100644 index 0000000000..ababbc30f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_histogram/diff_GsonTest.java.txt @@ -0,0 +1,897 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====1 +1:29a +2:32c +3:32c + import java.util.Collections; +==== +1:30a +2:34,40c + <<<<<<< HEAD + import java.util.concurrent.CountDownLatch; + ||||||| 47dea2ee + ======= + import java.util.concurrent.atomic.AtomicInteger; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====1 +1:59c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:69,70c +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:73c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:84,85c +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +==== +1:88a +2:101,163c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for test"); + } + } + + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + 
gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:230,593c + + /** + <<<<<<< HEAD + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + ||||||| 47dea2ee + ======= + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + >>>>>>> TEMP_RIGHT_BRANCH + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_ignorespace/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_ignorespace/diff_Gson.java.txt new file mode 100644 index 0000000000..a4311f32ed --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_ignorespace/diff_Gson.java.txt @@ -0,0 +1,1221 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====1 +1:53a +2:35c +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:43,70c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + <<<<<<< HEAD + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + ||||||| 47dea2ee + ======= + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====1 +1:78c + * String json = gson.toJson(target); // serializes target to Json +2:88c +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====1 +1:82,86c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +2:92,98c +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====1 +1:88,90c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +2:100,102c +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====1 +1:93,94c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +2:105,110c +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====1 +1:97c + *

    See the Gson User Guide +2:113c +3:106c + *

    See the Gson User Guide +====1 +1:100c + * @see com.google.gson.reflect.TypeToken +2:116,143c +3:109,136c + *

    + * Lenient JSON handling
    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    + * Serialization
    + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + *
    + * Deserialization
    + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====1 +1:120c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +2:162a +3:155a +==== +1:130,133c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:172,189c + <<<<<<< HEAD + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + ||||||| 47dea2ee + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + ======= + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal<>(); + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ||||||| 47dea2ee + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ======= + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:165,168c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====1 +1:158a +2:215c +3:194c + final List reflectionFilters; +====1 +1:185c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +2:242c +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====1 +1:187c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +2:244c +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====1 +1:202c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +2:259,260c +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:214c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +2:272,273c +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====1 +1:218c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +2:277c +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====1 +1:234a +2:294c +3:273c + this.reflectionFilters = reflectionFilters; +====1 +1:236c + List factories = new ArrayList(); +2:296c +3:275c + List factories = new ArrayList<>(); +====1 +1:299c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +2:359c +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====1 +1:308a +2:369c +3:348c + * @since 2.8.3 +====1 +1:371c + out.value(value); +2:432c +3:411c + out.value(doubleValue); +====1 +1:395c + out.value(value); +2:456,459c +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====1 +1:452c + List list = new ArrayList(); +2:516c +3:495c + List list = new ArrayList<>(); +====1 +1:475c + @SuppressWarnings("unchecked") +2:538a +3:517a +====1 +1:477c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +2:540,541c +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====1 +1:479c + return (TypeAdapter) cached; +2:543,545c +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:548,549c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +==== +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:551,557c + <<<<<<< HEAD + threadCalls = new LinkedHashMap<>(); + ||||||| 47dea2ee + threadCalls = new HashMap, FutureTypeAdapter>(); + ======= + threadCalls = new HashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:559c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:563,570c + <<<<<<< HEAD + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); + ||||||| 47dea2ee + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + ======= + @SuppressWarnings("unchecked") + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + >>>>>>> TEMP_RIGHT_BRANCH +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:575,576c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:578c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +====2 +1:502a +3:550a +2:584,590c + @SuppressWarnings("unchecked") + TypeAdapter existingAdapter = (TypeAdapter) typeTokenCache.putIfAbsent(type, candidate); + // If other thread concurrently added adapter prefer that one instead + if (existingAdapter != null) { + candidate = existingAdapter; + } + +==== +1:504c + typeTokenCache.put(type, candidate); +2:592,609c + <<<<<<< HEAD + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; + ||||||| 47dea2ee + typeTokenCache.put(type, candidate); + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) 
typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:615c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:618,635c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====1 +1:607c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:728c +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:611,612c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +2:732,733c +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====1 +1:613a +2:735,736c +3:696,697c + * + * @see #toJsonTree(Object, Type) +====1 +1:636a +2:760,761c +3:721,722c + * + * @see #toJsonTree(Object) +====1 +1:645c + * This method serializes the specified object into its equivalent Json representation. +2:770c +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====1 +1:649c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:774c +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:654c + * @param src the object for which Json representation is to be created setting for Gson +2:779c +3:740c + * @param src the object for which JSON representation is to be created +====1 +1:655a +2:781,783c +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====1 +1:666c + * equivalent Json representation. This method must be used if the specified object is a generic +2:794c +3:755c + * equivalent JSON representation. 
This method must be used if the specified object is a generic +====1 +1:677c + * @return Json representation of {@code src} +2:805,808c +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====1 +1:686c + * This method serializes the specified object into its equivalent Json representation. +2:817,818c +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====1 +1:690c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:822c +3:783c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:694,695c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +2:826,827c +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====1 +1:697a +2:830,832c +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====1 +1:709,710c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +2:844,846c +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====1 +1:719c + * @param writer Writer to which the Json representation of src needs to be written. +2:855c +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====1 +1:721a +2:858,860c +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====1 +1:734a +2:874,882c +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:737c + @SuppressWarnings("unchecked") +2:884a +3:845a +====1 +1:739c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +2:886,887c +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====1 +1:747c + ((TypeAdapter) adapter).write(writer, src); +2:895c +3:856c + adapter.write(writer, src); +====1 +1:778c + * @param writer Writer to which the Json representation needs to be written +2:926c +3:887c + * @param writer Writer to which the JSON representation needs to be written +====1 +1:832a +2:981,989c +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:858c + * This method deserializes the specified Json into an object of the specified class. It is not +2:1015c +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====1 +1:864c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +2:1021c +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====1 +1:866a +2:1024,1026c +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:873a +2:1034,1036c +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====1 +1:876c + Object object = fromJson(json, (Type) classOfT); +2:1039c +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:881c + * This method deserializes the specified Json into an object of the specified type. This method +2:1044c +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====1 +1:883c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +2:1046c +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====1 +1:885a +2:1049,1082c +3:1010,1043c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * +====1 +1:888,889c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +2:1085,1086c +3:1046,1047c + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====1 +1:892c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +2:1089c +3:1050c + * new TypeToken<Collection<Foo>>(){} +====1 +1:896,897c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1093,1097c +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====1 +1:899,900c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +2:1099c +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:905,906c + T target = (T) fromJson(reader, typeOfT); + return target; +2:1104c +3:1065c + return fromJson(reader, typeOfT); +====1 +1:910c + * This method deserializes the Json read from the specified reader into an object of the +2:1108c +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:914c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1112c +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:916c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +2:1114c +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====1 +1:918a +2:1117,1119c +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:920c + * @param json the reader producing the Json from which the object is to be deserialized. +2:1121c +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====1 +1:922c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +2:1123c +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:924c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1125c +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:925a +2:1127,1129c +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:928,930c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +2:1132c +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:935c + * This method deserializes the Json read from the specified reader into an object of the +2:1137c +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:937c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +2:1139c +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====1 +1:939a +2:1142,1149c +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:941,948c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +2:1151,1153c +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:950c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1155c +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:951a +2:1157,1160c +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:954a +2:1164,1192c +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:956c + T object = (T) fromJson(jsonReader, typeOfT); +2:1194c +3:1155c + T object = fromJson(jsonReader, typeOfT); +====1 +1:964c + throw new JsonIOException("JSON document was not fully consumed."); +2:1202c +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====1 +1:972a +2:1211,1213c +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====1 +1:974c + * Reads the next JSON value from {@code reader} and convert it to an object +2:1215c +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====1 +1:976c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +2:1216a +3:1177a +====1 +1:978,979c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1218,1239c +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====1 +1:982a +2:1243,1276c +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:989,990c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +2:1283c +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====1 +1:1017c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1310c +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1021c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1314c +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:1023c + * invoke {@link #fromJson(JsonElement, Type)}. +2:1316,1317c +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====1 +1:1028c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1322c +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1030c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +2:1324c +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====1 +1:1031a +2:1326,1328c +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1034c + Object object = fromJson(json, (Type) classOfT); +2:1331c +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:1039c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1336c +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1042a +2:1340,1344c +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====1 +1:1046,1052c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1348,1349c +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1055a +2:1353,1356c +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1058a +2:1360,1385c +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:1062c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +2:1389c +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1392,1402c + <<<<<<< HEAD + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; + ||||||| 47dea2ee + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; + ======= + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate; + >>>>>>> TEMP_RIGHT_BRANCH +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1411,1425c + <<<<<<< HEAD + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } + ||||||| 47dea2ee + @Override public T read(JsonReader in) throws IOException { + ======= + private TypeAdapter delegate() { + >>>>>>> TEMP_RIGHT_BRANCH +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +==== +1:1077c + throw new IllegalStateException(); +2:1427,1436c + <<<<<<< HEAD + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); + ||||||| 47dea2ee + throw new IllegalStateException(); + ======= + throw new IllegalStateException("Delegate has not been set yet"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1438,1461c + return delegate; + } + + <<<<<<< HEAD + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); + ||||||| 47dea2ee + @Override public void write(JsonWriter out, T value) throws IOException { + if (delegate == null) { + throw new IllegalStateException(); + ======= + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + >>>>>>> TEMP_RIGHT_BRANCH + } + + <<<<<<< HEAD + @Override public void write(JsonWriter out, T value) throws IOException { + getResolvedDelegate().write(out, value); + ||||||| 47dea2ee + delegate.write(out, value); + 
======= + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1465,1466c + delegate().write(out, value); + >>>>>>> TEMP_RIGHT_BRANCH +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_ignorespace/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_ignorespace/diff_GsonTest.java.txt new file mode 100644 index 0000000000..ababbc30f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_ignorespace/diff_GsonTest.java.txt @@ -0,0 +1,897 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====1 +1:29a +2:32c +3:32c + import java.util.Collections; +==== +1:30a +2:34,40c + <<<<<<< HEAD + import java.util.concurrent.CountDownLatch; + ||||||| 47dea2ee + ======= + import java.util.concurrent.atomic.AtomicInteger; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====1 +1:59c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:69,70c +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:73c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:84,85c +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +==== +1:88a +2:101,163c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for test"); + } + } + + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new 
DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:230,593c + + /** + <<<<<<< HEAD + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + ||||||| 47dea2ee + ======= + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + >>>>>>> TEMP_RIGHT_BRANCH + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_minimal/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_minimal/diff_Gson.java.txt new file mode 100644 index 0000000000..8b98a8e9f3 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_minimal/diff_Gson.java.txt @@ -0,0 +1,1221 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====1 +1:53a +2:35c +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:43,70c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + <<<<<<< HEAD + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + ||||||| 47dea2ee + ======= + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====1 +1:78c + * String json = gson.toJson(target); // serializes target to Json +2:88c +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====1 +1:82,86c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +2:92,98c +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====1 +1:88,90c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +2:100,102c +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====1 +1:93,94c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +2:105,110c +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====1 +1:97c + *

    See the Gson User Guide +2:113c +3:106c + *

    See the Gson User Guide +====1 +1:100c + * @see com.google.gson.reflect.TypeToken +2:116,143c +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====1 +1:120c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +2:162a +3:155a +==== +1:130,133c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:172,189c + <<<<<<< HEAD + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + ||||||| 47dea2ee + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + ======= + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal<>(); + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ||||||| 47dea2ee + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ======= + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:165,168c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====1 +1:158a +2:215c +3:194c + final List reflectionFilters; +====1 +1:185c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +2:242c +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====1 +1:187c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +2:244c +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====1 +1:202c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +2:259,260c +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:214c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +2:272,273c +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====1 +1:218c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +2:277c +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====1 +1:234a +2:294c +3:273c + this.reflectionFilters = reflectionFilters; +====1 +1:236c + List factories = new ArrayList(); +2:296c +3:275c + List factories = new ArrayList<>(); +====1 +1:299c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +2:359c +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====1 +1:308a +2:369c +3:348c + * @since 2.8.3 +====1 +1:371c + out.value(value); +2:432c +3:411c + out.value(doubleValue); +====1 +1:395c + out.value(value); +2:456,459c +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====1 +1:452c + List list = new ArrayList(); +2:516c +3:495c + List list = new ArrayList<>(); +====1 +1:475c + @SuppressWarnings("unchecked") +2:538a +3:517a +====1 +1:477c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +2:540,541c +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====1 +1:479c + return (TypeAdapter) cached; +2:543,545c +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:548,549c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +==== +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:551,557c + <<<<<<< HEAD + threadCalls = new LinkedHashMap<>(); + ||||||| 47dea2ee + threadCalls = new HashMap, FutureTypeAdapter>(); + ======= + threadCalls = new HashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:559c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:563,570c + <<<<<<< HEAD + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); + ||||||| 47dea2ee + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + ======= + @SuppressWarnings("unchecked") + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + >>>>>>> TEMP_RIGHT_BRANCH +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:575,576c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:578c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +====2 +1:502a +3:550a +2:584,590c + @SuppressWarnings("unchecked") + TypeAdapter existingAdapter = (TypeAdapter) typeTokenCache.putIfAbsent(type, candidate); + // If other thread concurrently added adapter prefer that one instead + if (existingAdapter != null) { + candidate = existingAdapter; + } + +==== +1:504c + typeTokenCache.put(type, candidate); +2:592,609c + <<<<<<< HEAD + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; + ||||||| 47dea2ee + typeTokenCache.put(type, candidate); + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) 
typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:615c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:618,635c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====1 +1:607c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:728c +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:611,612c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +2:732,733c +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====1 +1:613a +2:735,736c +3:696,697c + * + * @see #toJsonTree(Object, Type) +====1 +1:636a +2:760,761c +3:721,722c + * + * @see #toJsonTree(Object) +====1 +1:645c + * This method serializes the specified object into its equivalent Json representation. +2:770c +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====1 +1:649c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:774c +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:654c + * @param src the object for which Json representation is to be created setting for Gson +2:779c +3:740c + * @param src the object for which JSON representation is to be created +====1 +1:655a +2:781,783c +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====1 +1:666c + * equivalent Json representation. This method must be used if the specified object is a generic +2:794c +3:755c + * equivalent JSON representation. 
This method must be used if the specified object is a generic +====1 +1:677c + * @return Json representation of {@code src} +2:805,808c +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====1 +1:686c + * This method serializes the specified object into its equivalent Json representation. +2:817,818c +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====1 +1:690c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:822c +3:783c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:694,695c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +2:826,827c +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====1 +1:697a +2:830,832c +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====1 +1:709,710c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +2:844,846c +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====1 +1:719c + * @param writer Writer to which the Json representation of src needs to be written. +2:855c +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====1 +1:721a +2:858,860c +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====1 +1:734a +2:874,882c +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:737c + @SuppressWarnings("unchecked") +2:884a +3:845a +====1 +1:739c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +2:886,887c +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====1 +1:747c + ((TypeAdapter) adapter).write(writer, src); +2:895c +3:856c + adapter.write(writer, src); +====1 +1:778c + * @param writer Writer to which the Json representation needs to be written +2:926c +3:887c + * @param writer Writer to which the JSON representation needs to be written +====1 +1:832a +2:981,989c +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:858c + * This method deserializes the specified Json into an object of the specified class. It is not +2:1015c +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====1 +1:864c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +2:1021c +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====1 +1:866a +2:1024,1026c +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:873a +2:1034,1036c +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====1 +1:876c + Object object = fromJson(json, (Type) classOfT); +2:1039c +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:881c + * This method deserializes the specified Json into an object of the specified type. This method +2:1044c +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====1 +1:883c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +2:1046c +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====1 +1:885a +2:1049,1082c +3:1010,1043c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * +====1 +1:888,889c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +2:1085,1086c +3:1046,1047c + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====1 +1:892c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +2:1089c +3:1050c + * new TypeToken<Collection<Foo>>(){} +====1 +1:896,897c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1093,1097c +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====1 +1:899,900c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +2:1099c +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:905,906c + T target = (T) fromJson(reader, typeOfT); + return target; +2:1104c +3:1065c + return fromJson(reader, typeOfT); +====1 +1:910c + * This method deserializes the Json read from the specified reader into an object of the +2:1108c +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:914c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1112c +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:916c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +2:1114c +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====1 +1:918a +2:1117,1119c +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:920c + * @param json the reader producing the Json from which the object is to be deserialized. +2:1121c +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====1 +1:922c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +2:1123c +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:924c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1125c +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:925a +2:1127,1129c +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:928,930c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +2:1132c +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:935c + * This method deserializes the Json read from the specified reader into an object of the +2:1137c +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:937c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +2:1139c +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====1 +1:939a +2:1142,1149c +3:1103,1110c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *
    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:941,948c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
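As an aside to the Javadoc fragment above: a minimal sketch, not part of the diffed file, of how the Type-based overload it documents is typically called. Foo is a hypothetical placeholder class, and any Gson 2.x release provides fromJson(Reader, Type).

    // Illustrative sketch only; Foo is a placeholder type, not from the Gson sources.
    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;
    import java.io.Reader;
    import java.lang.reflect.Type;
    import java.util.Collection;

    class TypeBasedExample {
      static class Foo { int id; }

      static Collection<Foo> parse(Reader in) {
        // The Type comes from an anonymous TypeToken subclass, exactly as the snippet above shows.
        Type typeOfT = new TypeToken<Collection<Foo>>() {}.getType();
        // The result type is inferred at the call site rather than derived from typeOfT,
        // which is why the newer Javadoc in this diff calls the overload "not type-safe".
        return new Gson().fromJson(in, typeOfT);
      }
    }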
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +2:1151,1153c +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:950c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1155c +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:951a +2:1157,1160c +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:954a +2:1164,1192c +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *
    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
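For contrast with the Type-based call above, a minimal sketch, not part of the diffed file, of the TypeToken overload whose Javadoc appears in this hunk; it assumes Gson 2.10 or newer, where fromJson(Reader, TypeToken) exists.

    // Illustrative sketch only, assuming Gson 2.10+ on the classpath.
    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;
    import java.io.Reader;
    import java.util.List;

    class TypeTokenBasedExample {
      static List<String> parse(Reader in) {
        // No cast and no @SuppressWarnings: the return type is carried by the TypeToken itself.
        return new Gson().fromJson(in, new TypeToken<List<String>>() {});
      }
    }

This is the type-safety improvement the surrounding hunks keep pointing at: the compiler checks the assignment because T is bound by the TypeToken argument rather than inferred from the call site.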
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:956c + T object = (T) fromJson(jsonReader, typeOfT); +2:1194c +3:1155c + T object = fromJson(jsonReader, typeOfT); +====1 +1:964c + throw new JsonIOException("JSON document was not fully consumed."); +2:1202c +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====1 +1:972a +2:1211,1213c +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====1 +1:974c + * Reads the next JSON value from {@code reader} and convert it to an object +2:1215c +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====1 +1:976c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +2:1216a +3:1177a +====1 +1:978,979c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1218,1239c +3:1179,1200c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *
    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *
    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====1 +1:982a +2:1243,1276c +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *
    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *
    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
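A minimal sketch, not part of the diffed file, of the JsonReader overload documented above; it leans on the documented behavior that fromJson(JsonReader, ...) reads a single value and does not object to trailing data, so a loop can drain a stream of several top-level values. Assumes Gson 2.10+.

    // Illustrative sketch only, assuming Gson 2.10+ on the classpath.
    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;
    import com.google.gson.stream.JsonReader;
    import com.google.gson.stream.JsonToken;
    import java.io.IOException;
    import java.io.StringReader;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    class StreamingExample {
      static List<Map<String, String>> readAll(String json) throws IOException {
        Gson gson = new Gson();
        TypeToken<Map<String, String>> type = new TypeToken<Map<String, String>>() {};
        List<Map<String, String>> values = new ArrayList<>();
        try (JsonReader reader = new JsonReader(new StringReader(json))) {
          reader.setLenient(true); // allow several top-level values on one stream
          while (reader.peek() != JsonToken.END_DOCUMENT) {
            values.add(gson.fromJson(reader, type)); // reads exactly one value per call
          }
        }
        return values;
      }
    }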
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:989,990c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +2:1283c +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====1 +1:1017c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1310c +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1021c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1314c +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:1023c + * invoke {@link #fromJson(JsonElement, Type)}. +2:1316,1317c +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====1 +1:1028c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1322c +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1030c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +2:1324c +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====1 +1:1031a +2:1326,1328c +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1034c + Object object = fromJson(json, (Type) classOfT); +2:1331c +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:1039c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1336c +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1042a +2:1340,1344c +3:1301,1305c + *
    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====1 +1:1046,1052c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1348,1349c +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1055a +2:1353,1356c +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1058a +2:1360,1385c +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
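One more minimal sketch, not part of the diffed file, for the JsonElement overload documented in this hunk: deserializing from an already parsed tree. Assumes Gson 2.10+ for fromJson(JsonElement, TypeToken) and Gson 2.8.6+ for JsonParser.parseString.

    // Illustrative sketch only, assuming Gson 2.10+ on the classpath.
    import com.google.gson.Gson;
    import com.google.gson.JsonElement;
    import com.google.gson.JsonParser;
    import com.google.gson.reflect.TypeToken;
    import java.util.Map;

    class TreeExample {
      static Map<String, Integer> parse(String json) {
        JsonElement tree = JsonParser.parseString(json); // e.g. {"a":1,"b":2}
        return new Gson().fromJson(tree, new TypeToken<Map<String, Integer>>() {});
      }
    }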
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:1062c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +2:1389c +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1392,1402c + <<<<<<< HEAD + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; + ||||||| 47dea2ee + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; + ======= + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate; + >>>>>>> TEMP_RIGHT_BRANCH +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1411,1425c + <<<<<<< HEAD + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } + ||||||| 47dea2ee + @Override public T read(JsonReader in) throws IOException { + ======= + private TypeAdapter delegate() { + >>>>>>> TEMP_RIGHT_BRANCH +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +==== +1:1077c + throw new IllegalStateException(); +2:1427,1436c + <<<<<<< HEAD + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); + ||||||| 47dea2ee + throw new IllegalStateException(); + ======= + throw new IllegalStateException("Delegate has not been set yet"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1438,1453c + return delegate; + } + + <<<<<<< HEAD + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); + ||||||| 47dea2ee + return delegate.read(in); + ======= + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); + >>>>>>> TEMP_RIGHT_BRANCH +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return 
delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1457,1466c + <<<<<<< HEAD + getResolvedDelegate().write(out, value); + ||||||| 47dea2ee + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); + ======= + delegate().write(out, value); + >>>>>>> TEMP_RIGHT_BRANCH +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_minimal/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_minimal/diff_GsonTest.java.txt new file mode 100644 index 0000000000..ababbc30f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_minimal/diff_GsonTest.java.txt @@ -0,0 +1,897 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====1 +1:29a +2:32c +3:32c + import java.util.Collections; +==== +1:30a +2:34,40c + <<<<<<< HEAD + import java.util.concurrent.CountDownLatch; + ||||||| 47dea2ee + ======= + import java.util.concurrent.atomic.AtomicInteger; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====1 +1:59c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:69,70c +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:73c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:84,85c +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +==== +1:88a +2:101,163c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for test"); + } + } + + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof 
DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:230,593c + + /** + <<<<<<< HEAD + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + ||||||| 47dea2ee + ======= + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + >>>>>>> TEMP_RIGHT_BRANCH + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_myers/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_myers/diff_Gson.java.txt new file mode 100644 index 0000000000..8b98a8e9f3 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_myers/diff_Gson.java.txt @@ -0,0 +1,1221 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====1 +1:53a +2:35c +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:43,70c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + <<<<<<< HEAD + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + ||||||| 47dea2ee + ======= + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====1 +1:78c + * String json = gson.toJson(target); // serializes target to Json +2:88c +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====1 +1:82,86c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +2:92,98c +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====1 +1:88,90c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +2:100,102c +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====1 +1:93,94c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +2:105,110c +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====1 +1:97c + *

    See the Gson User Guide +2:113c +3:106c + *

    See the Gson User Guide +====1 +1:100c + * @see com.google.gson.reflect.TypeToken +2:116,143c +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====1 +1:120c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +2:162a +3:155a +==== +1:130,133c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:172,189c + <<<<<<< HEAD + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + ||||||| 47dea2ee + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + ======= + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal<>(); + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ||||||| 47dea2ee + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ======= + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:165,168c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====1 +1:158a +2:215c +3:194c + final List reflectionFilters; +====1 +1:185c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +2:242c +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====1 +1:187c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +2:244c +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====1 +1:202c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +2:259,260c +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:214c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +2:272,273c +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====1 +1:218c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +2:277c +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====1 +1:234a +2:294c +3:273c + this.reflectionFilters = reflectionFilters; +====1 +1:236c + List factories = new ArrayList(); +2:296c +3:275c + List factories = new ArrayList<>(); +====1 +1:299c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +2:359c +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====1 +1:308a +2:369c +3:348c + * @since 2.8.3 +====1 +1:371c + out.value(value); +2:432c +3:411c + out.value(doubleValue); +====1 +1:395c + out.value(value); +2:456,459c +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====1 +1:452c + List list = new ArrayList(); +2:516c +3:495c + List list = new ArrayList<>(); +====1 +1:475c + @SuppressWarnings("unchecked") +2:538a +3:517a +====1 +1:477c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +2:540,541c +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====1 +1:479c + return (TypeAdapter) cached; +2:543,545c +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:548,549c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +==== +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:551,557c + <<<<<<< HEAD + threadCalls = new LinkedHashMap<>(); + ||||||| 47dea2ee + threadCalls = new HashMap, FutureTypeAdapter>(); + ======= + threadCalls = new HashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:559c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:563,570c + <<<<<<< HEAD + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); + ||||||| 47dea2ee + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + ======= + @SuppressWarnings("unchecked") + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + >>>>>>> TEMP_RIGHT_BRANCH +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:575,576c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:578c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +====2 +1:502a +3:550a +2:584,590c + @SuppressWarnings("unchecked") + TypeAdapter existingAdapter = (TypeAdapter) typeTokenCache.putIfAbsent(type, candidate); + // If other thread concurrently added adapter prefer that one instead + if (existingAdapter != null) { + candidate = existingAdapter; + } + +==== +1:504c + typeTokenCache.put(type, candidate); +2:592,609c + <<<<<<< HEAD + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; + ||||||| 47dea2ee + typeTokenCache.put(type, candidate); + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) 
typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:615c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:618,635c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====1 +1:607c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:728c +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:611,612c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +2:732,733c +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====1 +1:613a +2:735,736c +3:696,697c + * + * @see #toJsonTree(Object, Type) +====1 +1:636a +2:760,761c +3:721,722c + * + * @see #toJsonTree(Object) +====1 +1:645c + * This method serializes the specified object into its equivalent Json representation. +2:770c +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====1 +1:649c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:774c +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:654c + * @param src the object for which Json representation is to be created setting for Gson +2:779c +3:740c + * @param src the object for which JSON representation is to be created +====1 +1:655a +2:781,783c +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====1 +1:666c + * equivalent Json representation. This method must be used if the specified object is a generic +2:794c +3:755c + * equivalent JSON representation. 
This method must be used if the specified object is a generic +====1 +1:677c + * @return Json representation of {@code src} +2:805,808c +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====1 +1:686c + * This method serializes the specified object into its equivalent Json representation. +2:817,818c +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====1 +1:690c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:822c +3:783c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:694,695c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +2:826,827c +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====1 +1:697a +2:830,832c +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====1 +1:709,710c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +2:844,846c +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====1 +1:719c + * @param writer Writer to which the Json representation of src needs to be written. +2:855c +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====1 +1:721a +2:858,860c +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====1 +1:734a +2:874,882c +3:835,843c + * + *

    <p>The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:737c + @SuppressWarnings("unchecked") +2:884a +3:845a +====1 +1:739c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +2:886,887c +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====1 +1:747c + ((TypeAdapter) adapter).write(writer, src); +2:895c +3:856c + adapter.write(writer, src); +====1 +1:778c + * @param writer Writer to which the Json representation needs to be written +2:926c +3:887c + * @param writer Writer to which the JSON representation needs to be written +====1 +1:832a +2:981,989c +3:942,950c + * + *

    <p>The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *
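
As an aside to the javadoc captured in this hunk, a minimal runnable sketch of writing through a caller-supplied JsonWriter might look like the following; the class name LenientWriteSketch and the sample payload are illustrative, not part of the merge data.

import com.google.gson.Gson;
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.io.StringWriter;

public class LenientWriteSketch {
  public static void main(String[] args) throws IOException {
    Gson gson = new Gson();
    StringWriter out = new StringWriter();
    JsonWriter jsonWriter = new JsonWriter(out);
    // Strict mode on the writer; per the javadoc above, Gson still writes this value in
    // lenient mode and restores the writer's original setting once toJson returns.
    jsonWriter.setLenient(false);
    gson.toJson(new String[] {"a", "b"}, String[].class, jsonWriter);
    System.out.println(out); // ["a","b"]
  }
}
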

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:858c + * This method deserializes the specified Json into an object of the specified class. It is not +2:1015c +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====1 +1:864c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +2:1021c +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====1 +1:866a +2:1024,1026c +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:873a +2:1034,1036c +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====1 +1:876c + Object object = fromJson(json, (Type) classOfT); +2:1039c +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:881c + * This method deserializes the specified Json into an object of the specified type. This method +2:1044c +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====1 +1:883c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +2:1046c +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====1 +1:885a +2:1049,1082c +3:1010,1043c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * +====1 +1:888,889c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +2:1085,1086c +3:1046,1047c + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====1 +1:892c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +2:1089c +3:1050c + * new TypeToken<Collection<Foo>>(){} +====1 +1:896,897c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1093,1097c +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====1 +1:899,900c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +2:1099c +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:905,906c + T target = (T) fromJson(reader, typeOfT); + return target; +2:1104c +3:1065c + return fromJson(reader, typeOfT); +====1 +1:910c + * This method deserializes the Json read from the specified reader into an object of the +2:1108c +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:914c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1112c +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:916c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +2:1114c +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====1 +1:918a +2:1117,1119c +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:920c + * @param json the reader producing the Json from which the object is to be deserialized. +2:1121c +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====1 +1:922c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +2:1123c +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:924c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1125c +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:925a +2:1127,1129c +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:928,930c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +2:1132c +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:935c + * This method deserializes the Json read from the specified reader into an object of the +2:1137c +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:937c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +2:1139c +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====1 +1:939a +2:1142,1149c +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:941,948c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * <pre>
    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * </pre>
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +2:1151,1153c +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:950c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1155c +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:951a +2:1157,1160c +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:954a +2:1164,1192c +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * <pre>
    +     * new TypeToken<Collection<Foo>>(){}
    +     * </pre>
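
The javadoc captured here describes passing an anonymous TypeToken subclass so the full generic type is preserved. A minimal sketch (names are illustrative; per the captured javadoc a fromJson(Reader, TypeToken) overload exists since 2.10, the sketch below uses the long-standing Type-based overload):

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.io.StringReader;
import java.lang.reflect.Type;
import java.util.List;

public class TypeTokenReaderSketch {
  public static void main(String[] args) {
    Gson gson = new Gson();
    // The anonymous TypeToken subclass captures the parameterized type List<String>.
    Type listType = new TypeToken<List<String>>() {}.getType();
    List<String> values = gson.fromJson(new StringReader("[\"a\", \"b\"]"), listType);
    System.out.println(values); // [a, b]
  }
}
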
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:956c + T object = (T) fromJson(jsonReader, typeOfT); +2:1194c +3:1155c + T object = fromJson(jsonReader, typeOfT); +====1 +1:964c + throw new JsonIOException("JSON document was not fully consumed."); +2:1202c +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====1 +1:972a +2:1211,1213c +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====1 +1:974c + * Reads the next JSON value from {@code reader} and convert it to an object +2:1215c +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====1 +1:976c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +2:1216a +3:1177a +====1 +1:978,979c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1218,1239c +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====1 +1:982a +2:1243,1276c +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * <pre>
    +     * new TypeToken<Collection<Foo>>(){}
    +     * </pre>
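
The surrounding hunk documents the JsonReader-based fromJson overloads, which read only the next value and tolerate trailing data. A minimal sketch under that assumption (the Point class is hypothetical, not part of the merge data):

import com.google.gson.Gson;
import com.google.gson.stream.JsonReader;
import java.io.IOException;
import java.io.StringReader;

public class JsonReaderSketch {
  public static void main(String[] args) throws IOException {
    Gson gson = new Gson();
    JsonReader reader = new JsonReader(new StringReader("{\"x\": 1} {\"x\": 2}"));
    // Unlike the String/Reader overloads, fromJson(JsonReader, ...) consumes one value at a
    // time and does not reject the second top-level element, as the captured javadoc states.
    Point first = gson.fromJson(reader, Point.class);
    Point second = gson.fromJson(reader, Point.class);
    System.out.println(first.x + " " + second.x); // 1 2
  }

  static class Point {
    int x;
  }
}
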
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:989,990c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +2:1283c +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====1 +1:1017c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1310c +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1021c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1314c +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:1023c + * invoke {@link #fromJson(JsonElement, Type)}. +2:1316,1317c +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====1 +1:1028c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1322c +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1030c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +2:1324c +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====1 +1:1031a +2:1326,1328c +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1034c + Object object = fromJson(json, (Type) classOfT); +2:1331c +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:1039c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1336c +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1042a +2:1340,1344c +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====1 +1:1046,1052c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * <pre>
    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * </pre>
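
This part of the diff concerns the fromJson(JsonElement, ...) overloads, i.e. deserializing from an in-memory parse tree. A minimal round-trip sketch (the Point class is illustrative only):

import com.google.gson.Gson;
import com.google.gson.JsonElement;

public class JsonTreeSketch {
  public static void main(String[] args) {
    Gson gson = new Gson();
    // Serialize to a parse tree instead of a String ...
    JsonElement tree = gson.toJsonTree(new Point(3, 4));
    // ... and deserialize straight from the tree, as the captured javadoc describes.
    Point copy = gson.fromJson(tree, Point.class);
    System.out.println(copy.x + "," + copy.y); // 3,4
  }

  static class Point {
    int x;
    int y;
    Point() {}
    Point(int x, int y) { this.x = x; this.y = y; }
  }
}
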
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1348,1349c +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1055a +2:1353,1356c +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1058a +2:1360,1385c +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * <pre>
    +     * new TypeToken<Collection<Foo>>(){}
    +     * </pre>
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:1062c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +2:1389c +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1392,1402c + <<<<<<< HEAD + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; + ||||||| 47dea2ee + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; + ======= + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate; + >>>>>>> TEMP_RIGHT_BRANCH +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1411,1425c + <<<<<<< HEAD + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } + ||||||| 47dea2ee + @Override public T read(JsonReader in) throws IOException { + ======= + private TypeAdapter delegate() { + >>>>>>> TEMP_RIGHT_BRANCH +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +==== +1:1077c + throw new IllegalStateException(); +2:1427,1436c + <<<<<<< HEAD + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); + ||||||| 47dea2ee + throw new IllegalStateException(); + ======= + throw new IllegalStateException("Delegate has not been set yet"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1438,1453c + return delegate; + } + + <<<<<<< HEAD + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); + ||||||| 47dea2ee + return delegate.read(in); + ======= + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); + >>>>>>> TEMP_RIGHT_BRANCH +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return 
delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1457,1466c + <<<<<<< HEAD + getResolvedDelegate().write(out, value); + ||||||| 47dea2ee + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); + ======= + delegate().write(out, value); + >>>>>>> TEMP_RIGHT_BRANCH +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_myers/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_myers/diff_GsonTest.java.txt new file mode 100644 index 0000000000..ababbc30f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_myers/diff_GsonTest.java.txt @@ -0,0 +1,897 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====1 +1:29a +2:32c +3:32c + import java.util.Collections; +==== +1:30a +2:34,40c + <<<<<<< HEAD + import java.util.concurrent.CountDownLatch; + ||||||| 47dea2ee + ======= + import java.util.concurrent.atomic.AtomicInteger; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====1 +1:59c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:69,70c +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:73c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:84,85c +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +==== +1:88a +2:101,163c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for test"); + } + } + + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof 
DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:230,593c + + /** + <<<<<<< HEAD + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + ||||||| 47dea2ee + ======= + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + >>>>>>> TEMP_RIGHT_BRANCH + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_patience/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_patience/diff_Gson.java.txt new file mode 100644 index 0000000000..9f73ce3ff0 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_patience/diff_Gson.java.txt @@ -0,0 +1,1216 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====1 +1:53a +2:35c +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:43,70c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + <<<<<<< HEAD + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + ||||||| 47dea2ee + ======= + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====1 +1:78c + * String json = gson.toJson(target); // serializes target to Json +2:88c +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====1 +1:82,86c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +2:92,98c +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====1 +1:88,90c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +2:100,102c +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====1 +1:93,94c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +2:105,110c +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====1 +1:97c + *

    See the Gson User Guide +2:113c +3:106c + *

    See the Gson User Guide +====1 +1:100c + * @see com.google.gson.reflect.TypeToken +2:116,143c +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====1 +1:120c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +2:162a +3:155a +==== +1:130,133c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:172,189c + <<<<<<< HEAD + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + ||||||| 47dea2ee + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + ======= + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal<>(); + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ||||||| 47dea2ee + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ======= + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:165,168c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====1 +1:158a +2:215c +3:194c + final List reflectionFilters; +====1 +1:185c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +2:242c +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====1 +1:187c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +2:244c +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====1 +1:202c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +2:259,260c +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:214c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +2:272,273c +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====1 +1:218c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +2:277c +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====1 +1:234a +2:294c +3:273c + this.reflectionFilters = reflectionFilters; +====1 +1:236c + List factories = new ArrayList(); +2:296c +3:275c + List factories = new ArrayList<>(); +====1 +1:299c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +2:359c +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====1 +1:308a +2:369c +3:348c + * @since 2.8.3 +====1 +1:371c + out.value(value); +2:432c +3:411c + out.value(doubleValue); +====1 +1:395c + out.value(value); +2:456,459c +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====1 +1:452c + List list = new ArrayList(); +2:516c +3:495c + List list = new ArrayList<>(); +====1 +1:475c + @SuppressWarnings("unchecked") +2:538a +3:517a +====1 +1:477c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +2:540,541c +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====1 +1:479c + return (TypeAdapter) cached; +2:543,545c +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:548,549c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +==== +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:551,557c + <<<<<<< HEAD + threadCalls = new LinkedHashMap<>(); + ||||||| 47dea2ee + threadCalls = new HashMap, FutureTypeAdapter>(); + ======= + threadCalls = new HashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:559c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:563,570c + <<<<<<< HEAD + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); + ||||||| 47dea2ee + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + ======= + @SuppressWarnings("unchecked") + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + >>>>>>> TEMP_RIGHT_BRANCH +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:575,576c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:578c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +====2 +1:502a +3:550a +2:584,590c + @SuppressWarnings("unchecked") + TypeAdapter existingAdapter = (TypeAdapter) typeTokenCache.putIfAbsent(type, candidate); + // If other thread concurrently added adapter prefer that one instead + if (existingAdapter != null) { + candidate = existingAdapter; + } + +==== +1:504c + typeTokenCache.put(type, candidate); +2:592,609c + <<<<<<< HEAD + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; + ||||||| 47dea2ee + typeTokenCache.put(type, candidate); + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) 
typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:615c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:618,635c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====1 +1:607c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:728c +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:611,612c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +2:732,733c +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====1 +1:613a +2:735,736c +3:696,697c + * + * @see #toJsonTree(Object, Type) +====1 +1:636a +2:760,761c +3:721,722c + * + * @see #toJsonTree(Object) +====1 +1:645c + * This method serializes the specified object into its equivalent Json representation. +2:770c +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====1 +1:649c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:774c +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:654c + * @param src the object for which Json representation is to be created setting for Gson +2:779c +3:740c + * @param src the object for which JSON representation is to be created +====1 +1:655a +2:781,783c +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====1 +1:666c + * equivalent Json representation. This method must be used if the specified object is a generic +2:794c +3:755c + * equivalent JSON representation. 
This method must be used if the specified object is a generic +====1 +1:677c + * @return Json representation of {@code src} +2:805,808c +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====1 +1:686c + * This method serializes the specified object into its equivalent Json representation. +2:817,818c +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====1 +1:690c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:822c +3:783c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:694,695c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +2:826,827c +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====1 +1:697a +2:830,832c +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====1 +1:709,710c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +2:844,846c +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====1 +1:719c + * @param writer Writer to which the Json representation of src needs to be written. +2:855c +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====1 +1:721a +2:858,860c +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====1 +1:734a +2:874,882c +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:737c + @SuppressWarnings("unchecked") +2:884a +3:845a +====1 +1:739c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +2:886,887c +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====1 +1:747c + ((TypeAdapter) adapter).write(writer, src); +2:895c +3:856c + adapter.write(writer, src); +====1 +1:778c + * @param writer Writer to which the Json representation needs to be written +2:926c +3:887c + * @param writer Writer to which the JSON representation needs to be written +====1 +1:832a +2:981,989c +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:858c + * This method deserializes the specified Json into an object of the specified class. It is not +2:1015c +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====1 +1:864c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +2:1021c +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====1 +1:866a +2:1024,1026c +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:873a +2:1034,1036c +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====1 +1:876c + Object object = fromJson(json, (Type) classOfT); +2:1039c +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:881c + * This method deserializes the specified Json into an object of the specified type. This method +2:1044c +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====1 +1:883c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +2:1046c +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====1 +1:885a +2:1049,1082c +3:1010,1043c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * +====1 +1:888,889c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +2:1085,1086c +3:1046,1047c + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====1 +1:892c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +2:1089c +3:1050c + * new TypeToken<Collection<Foo>>(){} +====1 +1:896,897c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1093,1097c +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====1 +1:899,900c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +2:1099c +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:905,906c + T target = (T) fromJson(reader, typeOfT); + return target; +2:1104c +3:1065c + return fromJson(reader, typeOfT); +====1 +1:910c + * This method deserializes the Json read from the specified reader into an object of the +2:1108c +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:914c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1112c +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:916c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +2:1114c +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====1 +1:918a +2:1117,1119c +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:920c + * @param json the reader producing the Json from which the object is to be deserialized. +2:1121c +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====1 +1:922c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +2:1123c +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:924c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1125c +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:925a +2:1127,1129c +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:928,930c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +2:1132c +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:935c + * This method deserializes the Json read from the specified reader into an object of the +2:1137c +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:937c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +2:1139c +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====1 +1:939a +2:1142,1149c +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:941,948c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +2:1151,1153c +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:950c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1155c +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:951a +2:1157,1160c +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:954a +2:1164,1192c +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:956c + T object = (T) fromJson(jsonReader, typeOfT); +2:1194c +3:1155c + T object = fromJson(jsonReader, typeOfT); +====1 +1:964c + throw new JsonIOException("JSON document was not fully consumed."); +2:1202c +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====1 +1:972a +2:1211,1213c +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====1 +1:974c + * Reads the next JSON value from {@code reader} and convert it to an object +2:1215c +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====1 +1:976c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +2:1216a +3:1177a +====1 +1:978,979c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1218,1239c +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====1 +1:982a +2:1243,1276c +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:989,990c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +2:1283c +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====1 +1:1017c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1310c +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1021c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1314c +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:1023c + * invoke {@link #fromJson(JsonElement, Type)}. +2:1316,1317c +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====1 +1:1028c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1322c +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1030c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +2:1324c +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====1 +1:1031a +2:1326,1328c +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1034c + Object object = fromJson(json, (Type) classOfT); +2:1331c +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:1039c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1336c +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1042a +2:1340,1344c +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====1 +1:1046,1052c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1348,1349c +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1055a +2:1353,1356c +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1058a +2:1360,1385c +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:1062c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +2:1389c +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1392,1402c + <<<<<<< HEAD + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; + ||||||| 47dea2ee + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; + ======= + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate; + >>>>>>> TEMP_RIGHT_BRANCH +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1411,1420c + <<<<<<< HEAD + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1422,1425c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1427,1452c + return delegate; + ||||||| 47dea2ee + ======= + private TypeAdapter delegate() { + if (delegate == null) { + throw new IllegalStateException("Delegate has not been set yet"); + } + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + >>>>>>> TEMP_RIGHT_BRANCH + } + + @Override public T read(JsonReader in) throws IOException { + <<<<<<< HEAD + return getResolvedDelegate().read(in); + ||||||| 47dea2ee + if (delegate == null) { + throw new IllegalStateException(); + } + return delegate.read(in); + ======= + return delegate().read(in); + >>>>>>> TEMP_RIGHT_BRANCH +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1456,1465c + <<<<<<< HEAD + getResolvedDelegate().write(out, value); + ||||||| 47dea2ee + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); + ======= + delegate().write(out, value); + >>>>>>> TEMP_RIGHT_BRANCH +3:1391c + 
delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_patience/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_patience/diff_GsonTest.java.txt new file mode 100644 index 0000000000..ababbc30f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/gitmerge_recursive_patience/diff_GsonTest.java.txt @@ -0,0 +1,897 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====1 +1:29a +2:32c +3:32c + import java.util.Collections; +==== +1:30a +2:34,40c + <<<<<<< HEAD + import java.util.concurrent.CountDownLatch; + ||||||| 47dea2ee + ======= + import java.util.concurrent.atomic.AtomicInteger; + >>>>>>> TEMP_RIGHT_BRANCH + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====1 +1:59c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:69,70c +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:73c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:84,85c +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +==== +1:88a +2:101,163c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for test"); + } + } + + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", 
e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:230,593c + + /** + <<<<<<< HEAD + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + ||||||| 47dea2ee + ======= + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + >>>>>>> TEMP_RIGHT_BRANCH + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
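
// Illustrative sketch only; it is not part of the recorded diff output above. It shows the
// behaviour the quoted tests verify: a GsonBuilder obtained from Gson#newBuilder() can be
// reconfigured without affecting the Gson instance it came from. The class and variable names
// (Point, original, derived) are invented for the example, and Gson 2.8.3+ (where
// Gson#newBuilder() is available) is assumed.
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializer;

public class NewBuilderIndependence {
  static class Point { int x = 1; }

  public static void main(String[] args) {
    Gson original = new Gson();
    GsonBuilder derived = original.newBuilder();
    // Register a custom serializer only on the derived builder.
    derived.registerTypeAdapter(Point.class,
        (JsonSerializer<Point>) (src, typeOfSrc, context) -> new JsonPrimitive("custom"));

    System.out.println(original.toJson(new Point()));          // {"x":1} -- original unchanged
    System.out.println(derived.create().toJson(new Point()));  // "custom"
  }
}
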
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
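
// Illustrative sketch only; it is not part of the recorded diff output above. The quoted tests
// stress Gson#getAdapter(TypeToken), which resolves and caches a TypeAdapter for a requested
// type; this is the public call pattern involved. The class name GetAdapterExample and the
// chosen type List<Integer> are invented for the example.
import com.google.gson.Gson;
import com.google.gson.TypeAdapter;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

public class GetAdapterExample {
  public static void main(String[] args) throws IOException {
    Gson gson = new Gson();
    // Repeated lookups for the same type are served from Gson's internal adapter cache.
    TypeAdapter<List<Integer>> adapter = gson.getAdapter(new TypeToken<List<Integer>>() {});
    System.out.println(adapter.toJson(Arrays.asList(1, 2, 3))); // [1,2,3]
    System.out.println(adapter.fromJson("[4,5]"));              // [4, 5]
  }
}
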
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/intellimerge/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/intellimerge/diff_Gson.java.txt new file mode 100644 index 0000000000..d82378822a --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1006/intellimerge/diff_Gson.java.txt @@ -0,0 +1,1234 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====1 +1:53a +2:35c +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:43,86c + <<<<<<< HEAD + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + ||||||| 47dea2ee + ======= + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + >>>>>>> TEMP_RIGHT_BRANCH +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====1 +1:78c + * String json = gson.toJson(target); // serializes target to Json +2:104c +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====1 +1:82,86c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +2:108,114c +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====1 +1:88,90c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +2:116,118c +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====1 +1:93,94c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +2:121,126c +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====1 +1:97c + *

    See the Gson User Guide +2:129c +3:106c + *

    See the Gson User Guide +====1 +1:100c + * @see com.google.gson.reflect.TypeToken +2:132,159c +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====1 +1:120c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +2:178a +3:155a +==== +1:130,133c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:188,205c + <<<<<<< HEAD + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + ||||||| 47dea2ee + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); + ======= + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal<>(); + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ||||||| 47dea2ee + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); + ======= + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:165,168c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); + + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====1 +1:158a +2:231c +3:194c + final List reflectionFilters; +====1 +1:185c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +2:258c +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====1 +1:187c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +2:260c +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====1 +1:202c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +2:275,276c +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:214c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +2:288,289c +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====1 +1:218c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +2:293c +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====1 +1:234a +2:310c +3:273c + this.reflectionFilters = reflectionFilters; +====1 +1:236c + List factories = new ArrayList(); +2:312c +3:275c + List factories = new ArrayList<>(); +====1 +1:299c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +2:375c +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====1 +1:308a +2:385c +3:348c + * @since 2.8.3 +====1 +1:371c + out.value(value); +2:448c +3:411c + out.value(doubleValue); +====1 +1:395c + out.value(value); +2:472,475c +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====1 +1:452c + List list = new ArrayList(); +2:532c +3:495c + List list = new ArrayList<>(); +====1 +1:475c + @SuppressWarnings("unchecked") +2:554a +3:517a +====1 +1:477c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +2:556,557c +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====1 +1:479c + return (TypeAdapter) cached; +2:559,561c +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:564,565c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +==== +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:567,573c + <<<<<<< HEAD + threadCalls = new LinkedHashMap<>(); + ||||||| 47dea2ee + threadCalls = new HashMap, FutureTypeAdapter>(); + ======= + threadCalls = new HashMap<>(); + >>>>>>> TEMP_RIGHT_BRANCH +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:575c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:579,586c + <<<<<<< HEAD + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); + ||||||| 47dea2ee + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + ======= + @SuppressWarnings("unchecked") + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); + >>>>>>> TEMP_RIGHT_BRANCH +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:591,592c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:594c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +====2 +1:502a +3:550a +2:600,606c + @SuppressWarnings("unchecked") + TypeAdapter existingAdapter = (TypeAdapter) typeTokenCache.putIfAbsent(type, candidate); + // If other thread concurrently added adapter prefer that one instead + if (existingAdapter != null) { + candidate = existingAdapter; + } + +==== +1:504c + typeTokenCache.put(type, candidate); +2:608,625c + <<<<<<< HEAD + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; + ||||||| 47dea2ee + typeTokenCache.put(type, candidate); + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) 
typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:631c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:634,651c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====1 +1:607c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:744c +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:611,612c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +2:748,749c +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====1 +1:613a +2:751,752c +3:696,697c + * + * @see #toJsonTree(Object, Type) +====1 +1:636a +2:776,777c +3:721,722c + * + * @see #toJsonTree(Object) +====1 +1:645c + * This method serializes the specified object into its equivalent Json representation. +2:786c +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====1 +1:649c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:790c +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:654c + * @param src the object for which Json representation is to be created setting for Gson +2:795c +3:740c + * @param src the object for which JSON representation is to be created +====1 +1:655a +2:797,799c +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====1 +1:666c + * equivalent Json representation. This method must be used if the specified object is a generic +2:810c +3:755c + * equivalent JSON representation. 
This method must be used if the specified object is a generic +====1 +1:677c + * @return Json representation of {@code src} +2:821,824c +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====1 +1:686c + * This method serializes the specified object into its equivalent Json representation. +2:833,834c +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====1 +1:690c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +2:838c +3:783c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====1 +1:694,695c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +2:842,843c +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====1 +1:697a +2:846,848c +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====1 +1:709,710c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +2:860,862c +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====1 +1:719c + * @param writer Writer to which the Json representation of src needs to be written. +2:871c +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====1 +1:721a +2:874,876c +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====1 +1:734a +2:890,898c +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:737c + @SuppressWarnings("unchecked") +2:900a +3:845a +====1 +1:739c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +2:902,903c +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====1 +1:747c + ((TypeAdapter) adapter).write(writer, src); +2:911c +3:856c + adapter.write(writer, src); +====1 +1:778c + * @param writer Writer to which the Json representation needs to be written +2:942c +3:887c + * @param writer Writer to which the JSON representation needs to be written +====1 +1:832a +2:997,1005c +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====1 +1:858c + * This method deserializes the specified Json into an object of the specified class. It is not +2:1031c +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====1 +1:864c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +2:1037c +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====1 +1:866a +2:1040,1042c +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:873a +2:1050,1052c +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====1 +1:876c + Object object = fromJson(json, (Type) classOfT); +2:1055c +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:881c + * This method deserializes the specified Json into an object of the specified type. This method +2:1060c +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====1 +1:883c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +2:1062c +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====1 +1:885a +2:1065,1098c +3:1010,1043c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * +====1 +1:888,889c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +2:1101,1102c +3:1046,1047c + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====1 +1:892c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +2:1105c +3:1050c + * new TypeToken<Collection<Foo>>(){} +====1 +1:896,897c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1109,1113c +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====1 +1:899,900c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +2:1115c +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:905,906c + T target = (T) fromJson(reader, typeOfT); + return target; +2:1120c +3:1065c + return fromJson(reader, typeOfT); +====1 +1:910c + * This method deserializes the Json read from the specified reader into an object of the +2:1124c +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:914c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1128c +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:916c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +2:1130c +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====1 +1:918a +2:1133,1135c +3:1078,1080c + *
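
// Illustrative sketch only; it is not part of the recorded diff output above. It exercises the
// fromJson(String, TypeToken) overload documented in the quoted Gson.java change (marked
// @since 2.10 there), so Gson 2.10+ on the classpath is assumed; the class and variable names
// are invented for the example.
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.util.List;

public class FromJsonTypeTokenExample {
  public static void main(String[] args) {
    Gson gson = new Gson();
    // The anonymous TypeToken subclass captures the full generic type List<String>.
    List<String> values = gson.fromJson("[\"a\",\"b\"]", new TypeToken<List<String>>() {});
    System.out.println(values); // [a, b]
  }
}
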

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:920c + * @param json the reader producing the Json from which the object is to be deserialized. +2:1137c +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====1 +1:922c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +2:1139c +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:924c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1141c +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:925a +2:1143,1145c +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:928,930c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +2:1148c +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:935c + * This method deserializes the Json read from the specified reader into an object of the +2:1153c +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====1 +1:937c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +2:1155c +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====1 +1:939a +2:1158,1165c +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====1 +1:941,948c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +2:1167,1169c +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====1 +1:950c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1171c +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====1 +1:951a +2:1173,1176c +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====1 +1:954a +2:1180,1208c +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:956c + T object = (T) fromJson(jsonReader, typeOfT); +2:1210c +3:1155c + T object = fromJson(jsonReader, typeOfT); +====1 +1:964c + throw new JsonIOException("JSON document was not fully consumed."); +2:1218c +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====1 +1:972a +2:1227,1229c +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====1 +1:974c + * Reads the next JSON value from {@code reader} and convert it to an object +2:1231c +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====1 +1:976c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +2:1232a +3:1177a +====1 +1:978,979c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +2:1234,1255c +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====1 +1:982a +2:1259,1292c +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====1 +1:989,990c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +2:1299c +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====1 +1:1017c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1326c +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1021c + * this method works fine if the any of the fields of the specified object are generics, just the +2:1330c +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====1 +1:1023c + * invoke {@link #fromJson(JsonElement, Type)}. +2:1332,1333c +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====1 +1:1028c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1338c +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1030c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +2:1340c +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====1 +1:1031a +2:1342,1344c +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1034c + Object object = fromJson(json, (Type) classOfT); +2:1347c +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====1 +1:1039c + * This method deserializes the Json read from the specified parse tree into an object of the +2:1352c +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====1 +1:1042a +2:1356,1360c +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====1 +1:1046,1052c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +2:1364,1365c +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====1 +1:1055a +2:1369,1372c +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====1 +1:1058a +2:1376,1401c +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====1 +1:1062c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +2:1405c +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1408,1418c + <<<<<<< HEAD + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; + ||||||| 47dea2ee + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; + ======= + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate; + >>>>>>> TEMP_RIGHT_BRANCH +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1427,1446c + <<<<<<< HEAD + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + ||||||| 47dea2ee + @Override public T read(JsonReader in) throws IOException { + if (delegate == null) { + throw new IllegalStateException(); + ======= + private TypeAdapter delegate() { + if (delegate == null) { + throw new IllegalStateException("Delegate has not been set yet"); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1448,1451c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1453,1470c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); + ||||||| 47dea2ee + return delegate.read(in); + ======= + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); + >>>>>>> TEMP_RIGHT_BRANCH +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1474,1483c + <<<<<<< HEAD + getResolvedDelegate().write(out, value); + ||||||| 47dea2ee + if (delegate == null) { + throw new IllegalStateException(); + } + 
delegate.write(out, value); + ======= + delegate().write(out, value); + >>>>>>> TEMP_RIGHT_BRANCH +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/intellimerge/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/intellimerge/diff_GsonTest.java.txt new file mode 100644 index 0000000000..fc1ab3d00a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/intellimerge/diff_GsonTest.java.txt @@ -0,0 +1,901 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====1 +1:29a +2:32c +3:32c + import java.util.Collections; +==== +1:30a +2:34,41c + <<<<<<< HEAD + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicReference; + ||||||| 47dea2ee + ======= + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; + >>>>>>> TEMP_RIGHT_BRANCH +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====1 +1:59c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:70,71c +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====1 +1:73c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +2:85,86c +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +==== +1:88a +2:102,164c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for test"); + } + } + + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + 
gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:231,597c + <<<<<<< HEAD + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } + ||||||| 47dea2ee + ======= + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + >>>>>>> TEMP_RIGHT_BRANCH +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/1006/spork/diff_Gson.java.txt b/src/python/merge_conflict_analysis_diffs/1006/spork/diff_Gson.java.txt new file mode 100644 index 0000000000..979214efc7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/spork/diff_Gson.java.txt @@ 
-0,0 +1,1124 @@ +====1 +1:19,37c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; + +2:18a +3:18a +====3 +1:53a +2:34a +3:35c + import com.google.gson.internal.bind.SerializationDelegatingTypeAdapter; +==== +1:60a +2:42,61c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +3:43,63c + import java.io.EOFException; + import java.io.IOException; + import java.io.Reader; + import java.io.StringReader; + import java.io.StringWriter; + import java.io.Writer; + import java.lang.reflect.Type; + import java.math.BigDecimal; + import java.math.BigInteger; + import java.text.DateFormat; + import java.util.ArrayList; + import java.util.Collections; + import java.util.Iterator; + import java.util.LinkedHashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.concurrent.ConcurrentHashMap; + import java.util.concurrent.ConcurrentMap; + import java.util.concurrent.atomic.AtomicLong; + import java.util.concurrent.atomic.AtomicLongArray; +====3 +1:78c +2:79c + * String json = gson.toJson(target); // serializes target to Json +3:81c + * String json = gson.toJson(target); // serializes target to JSON +====3 +1:82,86c +2:83,87c + *

    If the object that your are serializing/deserializing is a {@code ParameterizedType} + * (i.e. contains at least one type parameter and may be an array) then you must use the + * {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an + * example for serializing and deserializing a {@code ParameterizedType}: + * +3:85,91c + *

    If the type of the object that you are converting is a {@code ParameterizedType} + * (i.e. has at least one type argument, for example {@code List}) then for + * deserialization you must use a {@code fromJson} method with {@link Type} or {@link TypeToken} + * parameter to specify the parameterized type. For serialization specifying a {@code Type} + * or {@code TypeToken} is optional, otherwise Gson will use the runtime type of the object. + * {@link TypeToken} is a class provided by Gson which helps creating parameterized types. + * Here is an example showing how this can be done: +====3 +1:88,90c +2:89,91c + * Type listType = new TypeToken<List<String>>() {}.getType(); + * List<String> target = new LinkedList<String>(); + * target.add("blah"); +3:93,95c + * TypeToken<List<MyType>> listType = new TypeToken<List<MyType>>() {}; + * List<MyType> target = new LinkedList<MyType>(); + * target.add(new MyType(1, "abc")); +====3 +1:93,94c +2:94,95c + * String json = gson.toJson(target, listType); + * List<String> target2 = gson.fromJson(json, listType); +3:98,103c + * // For serialization you normally do not have to specify the type, Gson will use + * // the runtime type of the objects, however you can also specify it explicitly + * String json = gson.toJson(target, listType.getType()); + * + * // But for deserialization you have to specify the type + * List<MyType> target2 = gson.fromJson(json, listType); +====3 +1:97c +2:98c + *

    See the Gson User Guide +3:106c + *

    See the Gson User Guide +====3 +1:100c +2:101c + * @see com.google.gson.reflect.TypeToken +3:109,136c + *

    Lenient JSON handling

    + * For legacy reasons most of the {@code Gson} methods allow JSON data which does not + * comply with the JSON specification, regardless of whether {@link GsonBuilder#setLenient()} + * is used or not. If this behavior is not desired, the following workarounds can be used: + * + *

    Serialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be serialized + *
    2. When using an existing {@code JsonWriter}, manually apply the writer settings of this + * {@code Gson} instance listed by {@link #newJsonWriter(Writer)}.
      + * Otherwise, when not using an existing {@code JsonWriter}, use {@link #newJsonWriter(Writer)} + * to construct one. + *
    3. Call {@link TypeAdapter#write(JsonWriter, Object)} + *
    + * + *

    Deserialization

    + *
      + *
    1. Use {@link #getAdapter(Class)} to obtain the adapter for the type to be deserialized + *
    2. When using an existing {@code JsonReader}, manually apply the reader settings of this + * {@code Gson} instance listed by {@link #newJsonReader(Reader)}.
      + * Otherwise, when not using an existing {@code JsonReader}, use {@link #newJsonReader(Reader)} + * to construct one. + *
    3. Call {@link TypeAdapter#read(JsonReader)} + *
    4. Call {@link JsonReader#peek()} and verify that the result is {@link JsonToken#END_DOCUMENT} + * to make sure there is no trailing data + *
    + * + * @see TypeToken +====3 +1:120c +2:121c + private static final TypeToken NULL_KEY_SURROGATE = TypeToken.get(Object.class); +3:155a +====1 +1:130,131c + private final ThreadLocal, FutureTypeAdapter>> calls + = new ThreadLocal, FutureTypeAdapter>>(); +2:131,132c +3:165,166c + // Uses LinkedHashMap because iteration order is important, see getAdapter() implementation below + private final ThreadLocal, TypeAdapter>> calls = new ThreadLocal<>(); +==== +1:133c + private final Map, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +2:134c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap, TypeAdapter>(); +3:168c + private final ConcurrentMap, TypeAdapter> typeTokenCache = new ConcurrentHashMap<>(); +====3 +1:158a +2:159a +3:194c + final List reflectionFilters; +====3 +1:185c +2:186c + *
  • The default field naming policy for the output Json is same as in Java. So, a Java class +3:221c + *
  • The default field naming policy for the output JSON is same as in Java. So, a Java class +====3 +1:187c +2:188c + * Json. The same rules are applied for mapping incoming Json to the Java classes. You can +3:223c + * JSON. The same rules are applied for mapping incoming JSON to the Java classes. You can +====3 +1:202c +2:203c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY); +3:238,239c + Collections.emptyList(), DEFAULT_OBJECT_TO_NUMBER_STRATEGY, DEFAULT_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:214c +2:215c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy) { +3:251,252c + ToNumberStrategy objectToNumberStrategy, ToNumberStrategy numberToNumberStrategy, + List reflectionFilters) { +====3 +1:218c +2:219c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe); +3:256c + this.constructorConstructor = new ConstructorConstructor(instanceCreators, useJdkUnsafe, reflectionFilters); +====3 +1:234a +2:235a +3:273c + this.reflectionFilters = reflectionFilters; +====3 +1:236c +2:237c + List factories = new ArrayList(); +3:275c + List factories = new ArrayList<>(); +====3 +1:299c +2:300c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory)); +3:338c + constructorConstructor, fieldNamingStrategy, excluder, jsonAdapterFactory, reflectionFilters)); +====3 +1:308a +2:309a +3:348c + * @since 2.8.3 +====3 +1:371c +2:372c + out.value(value); +3:411c + out.value(doubleValue); +====3 +1:395c +2:396c + out.value(value); +3:435,438c + // For backward compatibility don't call `JsonWriter.value(float)` because that method has + // been newly added and not all custom JsonWriter implementations might override it yet + Number floatNumber = value instanceof Float ? value : floatValue; + out.value(floatNumber); +====3 +1:452c +2:453c + List list = new ArrayList(); +3:495c + List list = new ArrayList<>(); +====3 +1:475c +2:476c + @SuppressWarnings("unchecked") +3:517a +====3 +1:477c +2:478c + TypeAdapter cached = typeTokenCache.get(type == null ? 
NULL_KEY_SURROGATE : type); +3:519,520c + Objects.requireNonNull(type, "type must not be null"); + TypeAdapter cached = typeTokenCache.get(type); +====3 +1:479c +2:480c + return (TypeAdapter) cached; +3:522,524c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) cached; + return adapter; +====1 +1:482,483c + Map, FutureTypeAdapter> threadCalls = calls.get(); + boolean requiresThreadLocalCleanup = false; +2:483,484c +3:527,528c + LinkedHashMap, TypeAdapter> threadCalls = calls.get(); + boolean isInitialAdapterRequest = false; +====1 +1:485c + threadCalls = new HashMap, FutureTypeAdapter>(); +2:486c +3:530c + threadCalls = new LinkedHashMap<>(); +====1 +1:487c + requiresThreadLocalCleanup = true; +2:488c +3:532c + isInitialAdapterRequest = true; +==== +1:491c + FutureTypeAdapter ongoingCall = (FutureTypeAdapter) threadCalls.get(type); +2:492c + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +3:536,537c + @SuppressWarnings("unchecked") + TypeAdapter ongoingCall = (TypeAdapter) threadCalls.get(type); +====1 +1:495a +2:497,498c +3:542,543c + int existingAdaptersCount = threadCalls.size(); + boolean foundCandidate = false; +====1 +1:497c + FutureTypeAdapter call = new FutureTypeAdapter(); +2:500c +3:545c + FutureTypeAdapter call = new FutureTypeAdapter<>(); +==== +1:504c + typeTokenCache.put(type, candidate); +2:507,519c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + } + foundCandidate = true; +3:552,570c + // Replace future adapter with actual adapter + threadCalls.put(type, candidate); + + if (isInitialAdapterRequest) { + // Publish resolved adapters to all threads + // Can only do this for the initial request because cyclic dependency TypeA -> TypeB -> TypeA + // would otherwise publish adapter for TypeB which uses not yet resolved adapter for TypeA + // See https://github.com/google/gson/issues/625 + for (Map.Entry, TypeAdapter> resolvedAdapterEntry : threadCalls.entrySet()) { + typeTokenCache.putIfAbsent(resolvedAdapterEntry.getKey(), resolvedAdapterEntry.getValue()); + } + + @SuppressWarnings("unchecked") + TypeAdapter actualAdapter = (TypeAdapter) typeTokenCache.get(type); + // Prefer the actual adapter, in case putIfAbsent call above had no effect because other + // thread already concurrently added other adapter instance for the same type + candidate = actualAdapter; + } + foundCandidate = true; +====1 +1:510,512c + threadCalls.remove(type); + + if (requiresThreadLocalCleanup) { +2:525c +3:576c + if (isInitialAdapterRequest) { +==== +1:514a +2:528,545c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark 
adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +3:579,596c + if (!foundCandidate) { + Iterator> adaptersIterator = threadCalls.values().iterator(); + // Skip existing non-broken adapters + for (; existingAdaptersCount > 0; existingAdaptersCount--) { + adaptersIterator.next(); + } + // Remove this future adapter and all nested ones because they might + // refer to broken adapters + while (adaptersIterator.hasNext()) { + TypeAdapter brokenAdapter = adaptersIterator.next(); + if (brokenAdapter instanceof FutureTypeAdapter) { + // Mark adapter as broken so user sees useful exception message in + // case TypeAdapterFactory leaks reference to broken adapter + ((FutureTypeAdapter) brokenAdapter).markBroken(); + } + adaptersIterator.remove(); + } + } +====3 +1:607c +2:638c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:689c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:611,612c +2:642,643c + * @param src the object for which Json representation is to be created setting for Gson + * @return Json representation of {@code src}. +3:693,694c + * @param src the object for which JSON representation is to be created + * @return JSON representation of {@code src}. +====3 +1:613a +2:644a +3:696,697c + * + * @see #toJsonTree(Object, Type) +====3 +1:636a +2:667a +3:721,722c + * + * @see #toJsonTree(Object) +====3 +1:645c +2:676c + * This method serializes the specified object into its equivalent Json representation. +3:731c + * This method serializes the specified object into its equivalent JSON representation. +====3 +1:649c +2:680c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:735c + * of Java. Note that this method works fine if any of the object fields are of generic type, +====3 +1:654c +2:685c + * @param src the object for which Json representation is to be created setting for Gson +3:740c + * @param src the object for which JSON representation is to be created +====3 +1:655a +2:686a +3:742,744c + * + * @see #toJson(Object, Appendable) + * @see #toJson(Object, Type) +====3 +1:666c +2:697c + * equivalent Json representation. This method must be used if the specified object is a generic +3:755c + * equivalent JSON representation. This method must be used if the specified object is a generic +====3 +1:677c +2:708c + * @return Json representation of {@code src} +3:766,769c + * @return JSON representation of {@code src} + * + * @see #toJson(Object, Type, Appendable) + * @see #toJson(Object) +====3 +1:686c +2:717c + * This method serializes the specified object into its equivalent Json representation. +3:778,779c + * This method serializes the specified object into its equivalent JSON representation and + * writes it to the writer. +====3 +1:690c +2:721c + * of Java. Note that this method works fine if the any of the object fields are of generic type, +3:783c + * of Java. 
Note that this method works fine if any of the object fields are of generic type, +====3 +1:694,695c +2:725,726c + * @param src the object for which Json representation is to be created setting for Gson + * @param writer Writer to which the Json representation needs to be written +3:787,788c + * @param src the object for which JSON representation is to be created + * @param writer Writer to which the JSON representation needs to be written +====3 +1:697a +2:728a +3:791,793c + * + * @see #toJson(Object) + * @see #toJson(Object, Type, Appendable) +====3 +1:709,710c +2:740,741c + * equivalent Json representation. This method must be used if the specified object is a generic + * type. For non-generic objects, use {@link #toJson(Object, Appendable)} instead. +3:805,807c + * equivalent JSON representation and writes it to the writer. + * This method must be used if the specified object is a generic type. For non-generic objects, + * use {@link #toJson(Object, Appendable)} instead. +====3 +1:719c +2:750c + * @param writer Writer to which the Json representation of src needs to be written. +3:816c + * @param writer Writer to which the JSON representation of src needs to be written. +====3 +1:721a +2:752a +3:819,821c + * + * @see #toJson(Object, Type) + * @see #toJson(Object, Appendable) +====3 +1:734a +2:765a +3:835,843c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:737c +2:768c + @SuppressWarnings("unchecked") +3:845a +====3 +1:739c +2:770c + TypeAdapter adapter = getAdapter(TypeToken.get(typeOfSrc)); +3:847,848c + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) getAdapter(TypeToken.get(typeOfSrc)); +====3 +1:747c +2:778c + ((TypeAdapter) adapter).write(writer, src); +3:856c + adapter.write(writer, src); +====3 +1:778c +2:809c + * @param writer Writer to which the Json representation needs to be written +3:887c + * @param writer Writer to which the JSON representation needs to be written +====3 +1:832a +2:863a +3:942,950c + * + *

    The JSON data is written in {@linkplain JsonWriter#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided writer. The lenient mode setting + * of the writer is restored once this method returns. + * + *

    The 'HTML-safe' and 'serialize {@code null}' settings of this {@code Gson} instance + * (configured by the {@link GsonBuilder}) are applied, and the original settings of the + * writer are restored once this method returns. + * +====3 +1:858c +2:889c + * This method deserializes the specified Json into an object of the specified class. It is not +3:976c + * This method deserializes the specified JSON into an object of the specified class. It is not +====3 +1:864c +2:895c + * {@link #fromJson(String, Type)}. If you have the Json in a {@link Reader} instead of +3:982c + * {@link #fromJson(String, TypeToken)}. If you have the JSON in a {@link Reader} instead of +====3 +1:866a +2:897a +3:985,987c + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:873a +2:904a +3:995,997c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(String, TypeToken) +====3 +1:876c +2:907c + Object object = fromJson(json, (Type) classOfT); +3:1000c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:881c +2:912c + * This method deserializes the specified Json into an object of the specified type. This method +3:1005c + * This method deserializes the specified JSON into an object of the specified type. This method +====3 +1:883c +2:914c + * {@link #fromJson(String, Class)} instead. If you have the Json in a {@link Reader} instead of +3:1007c + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of +====3 +1:886,889c +2:917,920c + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for +3:1010,1047c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(String, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, + * or if there is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is + * not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the string. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(String, Class) + * @see #fromJson(String, TypeToken) + */ + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the specified JSON into an object of the specified type. This method + * is useful if the specified object is a generic type. For non-generic objects, use + * {@link #fromJson(String, Class)} instead. If you have the JSON in a {@link Reader} instead of + * a String, use {@link #fromJson(Reader, TypeToken)} instead. + * + *

    An exception is thrown if the JSON string has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the string from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for +====3 +1:892c +2:923c + * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType(); +3:1050c + * new TypeToken<Collection<Foo>>(){} +====3 +1:896,897c +2:927,928c + * @throws JsonParseException if json is not a valid representation for an object of type typeOfT + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1054,1058c + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(String, Class) + * @since 2.10 +====3 +1:899,900c +2:930,931c + @SuppressWarnings("unchecked") + public T fromJson(String json, Type typeOfT) throws JsonSyntaxException { +3:1060c + public T fromJson(String json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:905,906c +2:936,937c + T target = (T) fromJson(reader, typeOfT); + return target; +3:1065c + return fromJson(reader, typeOfT); +====3 +1:910c +2:941c + * This method deserializes the Json read from the specified reader into an object of the +3:1069c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:914c +2:945c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1073c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:916c +2:947c + * invoke {@link #fromJson(Reader, Type)}. If you have the Json in a String form instead of a +3:1075c + * invoke {@link #fromJson(Reader, TypeToken)}. If you have the JSON in a String form instead of a +====3 +1:918a +2:949a +3:1078,1080c + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:920c +2:951c + * @param json the reader producing the Json from which the object is to be deserialized. +3:1082c + * @param json the reader producing the JSON from which the object is to be deserialized. +====3 +1:922c +2:953c + * @return an object of type T from the string. Returns {@code null} if {@code json} is at EOF. +3:1084c + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:924c +2:955c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1086c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:925a +2:956a +3:1088,1090c + * + * @see #fromJson(String, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:928,930c +2:959,961c + JsonReader jsonReader = newJsonReader(json); + Object object = fromJson(jsonReader, classOfT); + assertFullConsumption(object, jsonReader); +3:1093c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:935c +2:966c + * This method deserializes the Json read from the specified reader into an object of the +3:1098c + * This method deserializes the JSON read from the specified reader into an object of the +====3 +1:937c +2:968c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the Json in a +3:1100c + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a +====3 +1:939a +2:970a +3:1103,1110c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(Reader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired. + * +====3 +1:941,948c +2:972,979c + * @param json the reader producing Json from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is at EOF. +3:1112,1114c + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. +====3 +1:950c +2:981c + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1116c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +====3 +1:951a +2:982a +3:1118,1121c + * + * @see #fromJson(String, Type) + * @see #fromJson(Reader, Class) + * @see #fromJson(Reader, TypeToken) +====3 +1:954a +2:985a +3:1125,1153c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified reader into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(Reader, Class)} instead. If you have the JSON in a + * String form instead of a {@link Reader}, use {@link #fromJson(String, TypeToken)} instead. + * + *

    An exception is thrown if the JSON data has multiple top-level JSON elements, or if there + * is trailing data. Use {@link #fromJson(JsonReader, TypeToken)} if this behavior is not desired. + * + * @param the type of the desired object + * @param json the reader producing JSON from which the object is to be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the Reader. Returns {@code null} if {@code json} is at EOF. + * @throws JsonIOException if there was a problem reading from the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type of typeOfT + * + * @see #fromJson(String, TypeToken) + * @see #fromJson(Reader, Class) + * @since 2.10 + */ + public T fromJson(Reader json, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:956c +2:987c + T object = (T) fromJson(jsonReader, typeOfT); +3:1155c + T object = fromJson(jsonReader, typeOfT); +====3 +1:964c +2:995c + throw new JsonIOException("JSON document was not fully consumed."); +3:1163c + throw new JsonSyntaxException("JSON document was not fully consumed."); +====3 +1:972a +2:1003a +3:1172,1174c + // fromJson(JsonReader, Class) is unfortunately missing and cannot be added now without breaking + // source compatibility in certain cases, see https://github.com/google/gson/pull/1700#discussion_r973764414 + +====3 +1:974c +2:1005c + * Reads the next JSON value from {@code reader} and convert it to an object +3:1176c + * Reads the next JSON value from {@code reader} and converts it to an object +====3 +1:976c +2:1007c + * Since Type is not parameterized by T, this method is type unsafe and should be used carefully +3:1177a +====3 +1:978,979c +2:1009,1010c + * @throws JsonIOException if there was a problem writing to the Reader + * @throws JsonSyntaxException if json is not a valid representation for an object of type +3:1179,1200c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonReader, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. If the provided type is a + * {@code Class} the {@code TypeToken} can be created with {@link TypeToken#get(Class)}. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonReader, TypeToken) +====3 +1:982a +2:1013a +3:1204,1237c + return (T) fromJson(reader, TypeToken.get(typeOfT)); + } + + /** + * Reads the next JSON value from {@code reader} and converts it to an object + * of type {@code typeOfT}. Returns {@code null}, if the {@code reader} is at EOF. + * This method is useful if the specified object is a generic type. For non-generic objects, + * {@link #fromJson(JsonReader, Type)} can be called, or {@link TypeToken#get(Class)} can + * be used to create the type token. + * + *

    Unlike the other {@code fromJson} methods, no exception is thrown if the JSON data has + * multiple top-level JSON elements, or if there is trailing data. + * + *

    The JSON data is parsed in {@linkplain JsonReader#setLenient(boolean) lenient mode}, + * regardless of the lenient mode setting of the provided reader. The lenient mode setting + * of the reader is restored once this method returns. + * + * @param the type of the desired object + * @param reader the reader whose next JSON value should be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JsonReader. Returns {@code null} if {@code reader} is at EOF. + * @throws JsonIOException if there was a problem reading from the JsonReader + * @throws JsonSyntaxException if json is not a valid representation for an object of the type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonReader, Type) + * @since 2.10 + */ + public T fromJson(JsonReader reader, TypeToken typeOfT) throws JsonIOException, JsonSyntaxException { +====3 +1:989,990c +2:1020,1021c + TypeToken typeToken = (TypeToken) TypeToken.get(typeOfT); + TypeAdapter typeAdapter = getAdapter(typeToken); +3:1244c + TypeAdapter typeAdapter = getAdapter(typeOfT); +====3 +1:1017c +2:1048c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1271c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1021c +2:1052c + * this method works fine if the any of the fields of the specified object are generics, just the +3:1275c + * this method works fine if any of the fields of the specified object are generics, just the +====3 +1:1023c +2:1054c + * invoke {@link #fromJson(JsonElement, Type)}. +3:1277,1278c + * invoke {@link #fromJson(JsonElement, TypeToken)}. + * +====3 +1:1028c +2:1059c + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1283c + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1030c +2:1061c + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT +3:1285c + * @throws JsonSyntaxException if json is not a valid representation for an object of type classOfT +====3 +1:1031a +2:1062a +3:1287,1289c + * + * @see #fromJson(Reader, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1034c +2:1065c + Object object = fromJson(json, (Type) classOfT); +3:1292c + T object = fromJson(json, TypeToken.get(classOfT)); +====3 +1:1039c +2:1070c + * This method deserializes the Json read from the specified parse tree into an object of the +3:1297c + * This method deserializes the JSON read from the specified parse tree into an object of the +====3 +1:1042a +2:1073a +3:1301,1305c + *

    Since {@code Type} is not parameterized by T, this method is not type-safe and + * should be used carefully. If you are creating the {@code Type} from a {@link TypeToken}, + * prefer using {@link #fromJson(JsonElement, TypeToken)} instead since its return type is based + * on the {@code TypeToken} and is therefore more type-safe. + * +====3 +1:1046,1052c +2:1077,1083c + * @param typeOfT The specific genericized type of src. You can obtain this type by using the + * {@link com.google.gson.reflect.TypeToken} class. For example, to get the type for + * {@code Collection}, you should use: + *

    +     * Type typeOfT = new TypeToken<Collection<Foo>>(){}.getType();
    +     * 
    + * @return an object of type T from the json. Returns {@code null} if {@code json} is {@code null} +3:1309,1310c + * @param typeOfT The specific genericized type of src + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} +====3 +1:1055a +2:1086a +3:1314,1317c + * + * @see #fromJson(Reader, Type) + * @see #fromJson(JsonElement, Class) + * @see #fromJson(JsonElement, TypeToken) +====3 +1:1058a +2:1089a +3:1321,1346c + return (T) fromJson(json, TypeToken.get(typeOfT)); + } + + /** + * This method deserializes the JSON read from the specified parse tree into an object of the + * specified type. This method is useful if the specified object is a generic type. For + * non-generic objects, use {@link #fromJson(JsonElement, Class)} instead. + * + * @param the type of the desired object + * @param json the root of the parse tree of {@link JsonElement}s from which the object is to + * be deserialized + * @param typeOfT The specific genericized type of src. You should create an anonymous subclass of + * {@code TypeToken} with the specific generic type arguments. For example, to get the type for + * {@code Collection}, you should use: + *
    +     * new TypeToken<Collection<Foo>>(){}
    +     * 
    + * @return an object of type T from the JSON. Returns {@code null} if {@code json} is {@code null} + * or if {@code json} is empty. + * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT + * + * @see #fromJson(Reader, TypeToken) + * @see #fromJson(JsonElement, Class) + * @since 2.10 + */ + public T fromJson(JsonElement json, TypeToken typeOfT) throws JsonSyntaxException { +====3 +1:1062c +2:1093c + return (T) fromJson(new JsonTreeReader(json), typeOfT); +3:1350c + return fromJson(new JsonTreeReader(json), typeOfT); +==== +1:1065,1066c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate; +2:1096,1098c + static class FutureTypeAdapter extends TypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +3:1353,1355c + static class FutureTypeAdapter extends SerializationDelegatingTypeAdapter { + private TypeAdapter delegate = null; + private boolean isBroken = false; +==== +1:1075c + @Override public T read(JsonReader in) throws IOException { +2:1107,1115c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter getResolvedDelegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +3:1364,1372c + public void markBroken() { + isBroken = true; + } + + private TypeAdapter delegate() { + TypeAdapter delegate = this.delegate; + if (isBroken) { + throw new IllegalStateException("Broken adapter has been leaked by TypeAdapterFactory"); + } +====1 +1:1077c + throw new IllegalStateException(); +2:1117,1120c +3:1374,1377c + // Can occur when adapter is leaked to other thread or when adapter is used for (de-)serialization + // directly within the TypeAdapterFactory which requested it + throw new IllegalStateException("Adapter for type with cyclic dependency has been used" + + " before dependency has been resolved"); +==== +1:1079c + return delegate.read(in); +2:1122,1126c + return delegate; + } + + @Override public T read(JsonReader in) throws IOException { + return getResolvedDelegate().read(in); +3:1379,1387c + return delegate; + } + + @Override public TypeAdapter getSerializationDelegate() { + return delegate(); + } + + @Override public T read(JsonReader in) throws IOException { + return delegate().read(in); +==== +1:1083,1086c + if (delegate == null) { + throw new IllegalStateException(); + } + delegate.write(out, value); +2:1130c + getResolvedDelegate().write(out, value); +3:1391c + delegate().write(out, value); diff --git a/src/python/merge_conflict_analysis_diffs/1006/spork/diff_GsonTest.java.txt b/src/python/merge_conflict_analysis_diffs/1006/spork/diff_GsonTest.java.txt new file mode 100644 index 0000000000..76854475ca --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1006/spork/diff_GsonTest.java.txt @@ -0,0 +1,681 @@ +====1 +1:18a +2:19c +3:19c + import com.google.gson.Gson.FutureTypeAdapter; +====1 +1:19a +2:21c +3:21c + import com.google.gson.reflect.TypeToken; +====3 +1:29a +2:31a +3:32c + import java.util.Collections; +==== +1:30a +2:33,34c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicReference; +3:34,36c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.atomic.AtomicInteger; + import java.util.concurrent.atomic.AtomicReference; +====3 +1:59c +2:63c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:65,66c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, 
CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:73c +2:77c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY); +3:80,81c + CUSTOM_OBJECT_TO_NUMBER_STRATEGY, CUSTOM_NUMBER_TO_NUMBER_STRATEGY, + Collections.emptyList()); +====3 +1:88a +2:92a +3:97,149c + public void testGetAdapter_Null() { + Gson gson = new Gson(); + try { + gson.getAdapter((TypeToken) null); + fail(); + } catch (NullPointerException e) { + assertEquals("type must not be null", e.getMessage()); + } + } + + public void testGetAdapter_Concurrency() { + final AtomicInteger adapterInstancesCreated = new AtomicInteger(0); + final AtomicReference> threadAdapter = new AtomicReference<>(); + final Class requestedType = Number.class; + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + private volatile boolean isFirstCall = true; + + @Override public TypeAdapter create(final Gson gson, TypeToken type) { + if (isFirstCall) { + isFirstCall = false; + + // Create a separate thread which requests an adapter for the same type + // This will cause this factory to return a different adapter instance than + // the one it is currently creating + Thread thread = new Thread() { + @Override public void run() { + threadAdapter.set(gson.getAdapter(requestedType)); + } + }; + thread.start(); + try { + thread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + // Create a new dummy adapter instance + adapterInstancesCreated.incrementAndGet(); + return new DummyAdapter<>(); + } + }) + .create(); + + TypeAdapter adapter = gson.getAdapter(requestedType); + assertTrue(adapter instanceof DummyAdapter); + assertEquals(2, adapterInstancesCreated.get()); + // Should be the same adapter instance the concurrent thread received + assertSame(threadAdapter.get(), adapter); + } + +==== +1:154a +2:159,374c + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). 
+ */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + if (callCount == 0) { + callCount++; + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } +3:216,581c + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a + * {@code new Gson()} should not affect the Gson instance it came from. 
+ */ + public void testDefaultGsonNewBuilderModification() { + Gson gson = new Gson(); + GsonBuilder gsonBuilder = gson.newBuilder(); + + // Modifications of `gsonBuilder` should not affect `gson` object + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }); + + assertDefaultGson(gson); + // New GsonBuilder created from `gson` should not have been affected by changes either + assertDefaultGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should use custom adapters + assertCustomGson(gsonBuilder.create()); + } + + private static void assertDefaultGson(Gson gson) { + // Should use default reflective adapter + String json1 = gson.toJson(new CustomClass1()); + assertEquals("{}", json1); + + // Should use default reflective adapter + String json2 = gson.toJson(new CustomClass2()); + assertEquals("{}", json2); + + // Should use default instance creator + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals(CustomClass3.NO_ARG_CONSTRUCTOR_VALUE, customClass3.s); + } + + /** + * Modifying a GsonBuilder obtained from {@link Gson#newBuilder()} of a custom + * Gson instance (created using a GsonBuilder) should not affect the Gson instance + * it came from. 
+ */ + public void testNewBuilderModification() { + Gson gson = new GsonBuilder() + .registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("custom-adapter"); + } + }) + .registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("custom-hierarchy-adapter"); + } + }) + .registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("custom-instance"); + } + }) + .create(); + + assertCustomGson(gson); + + // Modify `gson.newBuilder()` + GsonBuilder gsonBuilder = gson.newBuilder(); + gsonBuilder.registerTypeAdapter(CustomClass1.class, new TypeAdapter() { + @Override public CustomClass1 read(JsonReader in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override public void write(JsonWriter out, CustomClass1 value) throws IOException { + out.value("overwritten custom-adapter"); + } + }); + gsonBuilder.registerTypeHierarchyAdapter(CustomClass2.class, new JsonSerializer() { + @Override public JsonElement serialize(CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) { + return new JsonPrimitive("overwritten custom-hierarchy-adapter"); + } + }); + gsonBuilder.registerTypeAdapter(CustomClass3.class, new InstanceCreator() { + @Override public CustomClass3 createInstance(Type type) { + return new CustomClass3("overwritten custom-instance"); + } + }); + + // `gson` object should not have been affected by changes to new GsonBuilder + assertCustomGson(gson); + // New GsonBuilder based on `gson` should not have been affected either + assertCustomGson(gson.newBuilder().create()); + + // But new Gson instance from `gsonBuilder` should be affected by changes + Gson otherGson = gsonBuilder.create(); + String json1 = otherGson.toJson(new CustomClass1()); + assertEquals("\"overwritten custom-adapter\"", json1); + + String json2 = otherGson.toJson(new CustomClass2()); + assertEquals("\"overwritten custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = otherGson.fromJson("{}", CustomClass3.class); + assertEquals("overwritten custom-instance", customClass3.s); + } + + private static void assertCustomGson(Gson gson) { + String json1 = gson.toJson(new CustomClass1()); + assertEquals("\"custom-adapter\"", json1); + + String json2 = gson.toJson(new CustomClass2()); + assertEquals("\"custom-hierarchy-adapter\"", json2); + + CustomClass3 customClass3 = gson.fromJson("{}", CustomClass3.class); + assertEquals("custom-instance", customClass3.s); + } + + static class CustomClass1 { } + static class CustomClass2 { } + static class CustomClass3 { + static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance"; + + final String s; + + public CustomClass3(String s) { + this.s = s; + } + + public CustomClass3() { + this(NO_ARG_CONSTRUCTOR_VALUE); + } + } + + /** + * Verifies that {@link Gson#getAdapter(TypeToken)} does not put broken adapters + * into {@code typeTokenCache} when caller of nested {@code getAdapter} discards + * exception, e.g.: + * + * Field dependencies: + * ClassA + * -> ClassB1 + * -> ClassC -> ClassB1 + * -> ClassX + * | ClassB2 + * + * Let's assume the factory for ClassX throws an 
exception. + * 1. Factory for ClassA finds field of type ClassB1 + * 2. Factory for ClassB1 finds field of type ClassC + * 3. Factory for ClassC find fields of type ClassB1 => stores future adapter + * 4. Factory for ClassB1 finds field of type ClassX => ClassX factory throws exception + * 5. Factory for ClassA ignores exception from getAdapter(ClassB1) and tries as alternative getting + * adapter for ClassB2 + * + * Then Gson must not cache adapter for ClassC because it refers to broken adapter + * for ClassB1 (since ClassX threw exception). + */ + public void testGetAdapterDiscardedException() throws Exception { + final TypeAdapter alternativeAdapter = new DummyAdapter<>(); + final AtomicReference> leakedAdapter = new AtomicReference<>(); + + Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + if (type.getRawType() == CustomClassA.class) { + // Factory will throw for CustomClassB1; discard exception + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + @SuppressWarnings("unchecked") + TypeAdapter adapter = (TypeAdapter) alternativeAdapter; + return adapter; + } + else if (type.getRawType() == CustomClassB1.class) { + gson.getAdapter(CustomClassC.class); + // Will throw exception + gson.getAdapter(CustomClassX.class); + + throw new AssertionError("Factory should have thrown exception for CustomClassX"); + } + else if (type.getRawType() == CustomClassC.class) { + // Will return future adapter due to cyclic dependency B1 -> C -> B1 + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + assertTrue(adapter instanceof FutureTypeAdapter); + // Pretend this factory somehow leaks this FutureTypeAdapter + leakedAdapter.set(adapter); + return new DummyAdapter(); + } + else if (type.getRawType() == CustomClassX.class) { + // Always throw exception + throw new RuntimeException("test exception"); + } + + throw new AssertionError("Requested adapter for unexpected type: " + type); + } + }) + .create(); + + assertSame(alternativeAdapter, gson.getAdapter(CustomClassA.class)); + // Gson must not have cached broken adapters for CustomClassB1 and CustomClassC + try { + gson.getAdapter(CustomClassB1.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + try { + gson.getAdapter(CustomClassC.class); + fail("Expected exception"); + } catch (Exception e) { + assertEquals("test exception", e.getMessage()); + } + + // Leaked adapter should have been marked as "broken" + try { + leakedAdapter.get().fromJson("{}"); + fail("Expected exception"); + } catch (IllegalStateException e) { + assertEquals("Broken adapter has been leaked by TypeAdapterFactory", e.getMessage()); + } + } + + /** + * Verifies that two threads calling {@link Gson#getAdapter(TypeToken)} do not see the + * same unresolved {@link FutureTypeAdapter} instance, since that would not be thread-safe. + * + * This test constructs the cyclic dependency CustomClassA -> CustomClassB1 -> CustomClassA + * and lets one thread wait after the adapter for CustomClassB1 has been obtained (which still + * contains the nested unresolved FutureTypeAdapter for CustomClassA). + */ + public void testGetAdapterFutureAdapterConcurrency() throws Exception { + /** + * Adapter which wraps another adapter. Can be imagined as a simplified version of the + * ReflectiveTypeAdapterFactory$Adapter. 
+ */ + class WrappingAdapter extends TypeAdapter { + final TypeAdapter wrapped; + int callCount = 0; + + WrappingAdapter(TypeAdapter wrapped) { + this.wrapped = wrapped; + } + + @Override public void write(JsonWriter out, T value) throws IOException { + // Due to how this test is set up there is infinite recursion, therefore + // need to track how deeply nested this call is + try { + if (callCount++ == 0) { + out.beginArray(); + wrapped.write(out, null); + out.endArray(); + } else { + out.value("wrapped-nested"); + } + } finally { + callCount--; + } + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + final CountDownLatch isThreadWaiting = new CountDownLatch(1); + final CountDownLatch canThreadProceed = new CountDownLatch(1); + + final Gson gson = new GsonBuilder() + .registerTypeAdapterFactory(new TypeAdapterFactory() { + // volatile instead of AtomicBoolean is safe here because CountDownLatch prevents + // "true" concurrency + volatile boolean isFirstCaller = true; + + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + Class raw = type.getRawType(); + + if (raw == CustomClassA.class) { + // Retrieves a WrappingAdapter containing a nested FutureAdapter for CustomClassA + TypeAdapter adapter = gson.getAdapter(CustomClassB1.class); + + // Let thread wait so the FutureAdapter for CustomClassA nested in the adapter + // for CustomClassB1 has not been resolved yet + if (isFirstCaller) { + isFirstCaller = false; + isThreadWaiting.countDown(); + + try { + canThreadProceed.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + return new WrappingAdapter<>(adapter); + } + else if (raw == CustomClassB1.class) { + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + assertTrue(adapter instanceof FutureTypeAdapter); + return new WrappingAdapter<>(adapter); + } + else { + throw new AssertionError("Adapter for unexpected type requested: " + raw); + } + } + }) + .create(); + + final AtomicReference> otherThreadAdapter = new AtomicReference<>(); + Thread thread = new Thread() { + @Override + public void run() { + otherThreadAdapter.set(gson.getAdapter(CustomClassA.class)); + } + }; + thread.start(); + + // Wait until other thread has obtained FutureAdapter + isThreadWaiting.await(); + TypeAdapter adapter = gson.getAdapter(CustomClassA.class); + // Should not fail due to referring to unresolved FutureTypeAdapter + assertEquals("[[\"wrapped-nested\"]]", adapter.toJson(null)); + + // Let other thread proceed and have it resolve its FutureTypeAdapter + canThreadProceed.countDown(); + thread.join(); + assertEquals("[[\"wrapped-nested\"]]", otherThreadAdapter.get().toJson(null)); + } + + private static class DummyAdapter extends TypeAdapter { + @Override public void write(JsonWriter out, T value) throws IOException { + throw new AssertionError("not needed for this test"); + } + + @Override public T read(JsonReader in) throws IOException { + throw new AssertionError("not needed for this test"); + } + } + + private static class CustomClassA { + } + private static class CustomClassB1 { + } + private static class CustomClassC { + } + private static class CustomClassX { + } diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..b0d839171d --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_AsyncNotifyService.java.txt @@ -0,0 +1,74 @@ +====3 +1:35,36c +2:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====3 +1:130c +2:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +==== +1:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:136,138c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:158c +3:157c + +====3 +1:171c +2:172c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====3 +1:177c +2:178c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====3 +1:199c +2:200c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====3 +1:262c +2:263c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====3 +1:265c +2:266c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:313c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ConfigController.java.txt new file mode 100644 index 0000000000..b8fee64e3d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ConfigController.java.txt @@ -0,0 +1,183 @@ +====3 +1:18a +2:18a +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====3 +1:22a +2:22a +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +====1 +1:34a +2:35c +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:38a +3:40a +====3 +1:45a +2:45a +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====3 +1:48c +2:48c + import com.alibaba.nacos.core.utils.InetUtils; +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:93,94c +2:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +3:95a +====3 +1:137a +2:137a +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + 
type = ConfigType.getDefaultType().getType(); + } +====3 +1:178c +2:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:199c +2:199c + tenant = processTenant(tenant); +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:283c +2:283c + return ResultBuilder.buildSuccessResult(true); +3:288c + return RestResultUtils.success(true); +====3 +1:472c +2:472c + tenant = processTenant(tenant); +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:527c +2:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:530,534c +2:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:535a +2:535a +3:540c + +====3 +1:548c +2:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====3 +1:560c +2:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====3 +1:584c +2:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====3 +1:588c +2:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:601c +2:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:604c +2:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +3:609c + return RestResultUtils.success("导入成功", saveResult); +====3 +1:628c +2:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====3 +1:631,634c +2:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====3 +1:636c +2:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:650c +2:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:674c +2:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); 
+3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:687c +2:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:690,697c +2:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..c5d1158c4b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,163 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====3 +1:30a +2:30a +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:43c + +====2 +1:46c +3:47c + +2:46c + +====2 +1:49c +3:50c + +2:49c + +====2 +1:51c +3:52c + +2:51c + +====2 +1:53c +3:54c + +2:53c + +====2 +1:56c +3:57c + +2:56c + +====2 +1:58c +3:59c + +2:58c + +====2 +1:68c +3:69c + +2:68c + +====2 +1:70c +3:71c + +2:70c + +====2 +1:72c +3:73c + +2:72c + +====2 +1:74c +3:75c + +2:74c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +3:78,83c + + String where = " username= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +==== +1:83,84c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:83,84c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +3:86,87c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:86c + +====2 +1:88c +3:91c + +2:88c + +====2 +1:96c +3:99c + +2:96c + +====2 +1:98c +3:101c + +2:98c + +====2 +1:106c +3:109c + +2:106c + +====2 +1:121c +3:124c + +2:121c + +====2 +1:137c +3:140c + +2:137c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:141c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:144c + diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..0492051512 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,150 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +====3 +1:40a +2:19a +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====2 +1:43c +3:23c + import com.alibaba.nacos.config.server.utils.ConfigExecutor; +2:21a +====3 +1:45c +2:23c + import com.alibaba.nacos.core.utils.ApplicationUtils; +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:25,42c + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import 
org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====3 +1:55,58c +2:51,54c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +3:49a +====3 +1:86,87c +2:82,83c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:116,118c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====3 +1:130c +2:126c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====3 +1:193c +2:189c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:196c +2:192c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:204,217c +2:200,213c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? 
defaultValue : value; + } + +3:192a +====3 +1:272c +2:268c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====3 +1:275c +2:271c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..5424bf2de0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,128 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:31,35c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +2:29a +3:34,40c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + import java.util.Collections; + import java.util.List; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:40c +3:51c + private PermissionsRepository permissionsRepository; +====1 +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new 
ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:43,50c +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:61,62c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,77c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..17328923af --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,204 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:32,35c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import 
java.sql.SQLException; + import java.util.ArrayList; +2:29a +3:34,38c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + import java.util.Collections; +====1 +1:36a +2:31c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:32a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:44c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:48,55c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:60,67c +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:73c +3:82c + * @param role role string value. 
+====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:78c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:87,88c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:94c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:98,100c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +====1 +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:105,107c +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:109a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..d4cc7360ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,2999 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import 
com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. 
+ */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, 
configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? 
StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? 
limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? 
"); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? 
"); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? "); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? 
"); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? 
"); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT 
group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? 
)" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? 
)" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? ) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? 
"); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289,2300c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override +2:1406,1411c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override +3:1406,1424c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); + throw e; + } + } + + @Override +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1414,1422c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1427,1432c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1437,1438c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1443,1444c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? 
AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1450c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1504c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1521,1525c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1530c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1579,1582c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1584,1588c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1591c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? 
"); + paramList.add(appName); +2:1594c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1597,1605c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1696,1697c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_HistoryController.java.txt new file mode 100644 index 0000000000..f20395adc2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_HistoryController.java.txt @@ -0,0 +1,94 @@ +====3 +1:31,33c +2:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +3:30a +====2 +1:42c +3:39c + +2:42c + +====2 +1:45c +3:42c + +2:45c + +====3 +1:49,53c +2:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:60,66c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:73c + +====3 +1:75c +2:75c + * Query the detailed configuration history informations. +3:72,75c + * Query the detailed configuration history information. 
+ * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +==== +1:82c + +2:82c + +3:81,93c + + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..154063ccc5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_MergeDatumService.java.txt @@ -0,0 +1,35 @@ +====3 +1:30,31c +2:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:109c +2:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====3 +1:117c +2:117c + if (ApplicationUtils.getStandaloneMode()) { +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166,168c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +2:166,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +3:167,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..f8190d8c70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,38 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:31c +2:31c + import com.alibaba.nacos.core.utils.InetUtils; +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:55c +2:55c + public boolean process(AbstractDelayTask task) { +3:56c + public boolean process(NacosTask task) { +====3 +1:87c +2:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====3 +1:101c +2:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..b34849e3c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,58 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:27,28c +2:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:49c +2:49c + public boolean process(AbstractDelayTask task) { +3:50c + public boolean process(NacosTask task) { +====3 +1:76c +2:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====3 +1:78c +2:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====3 +1:82c +2:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:92c +2:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:100c +2:100c + ConfigTraceService.logNotifyEvent(dataId, group, 
tenant, null, lastModified, InetUtils.getSelfIp(), +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_application.properties.txt new file mode 100644 index 0000000000..9395a60678 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_application.properties.txt @@ -0,0 +1,85 @@ +====3 +1:40,41c +2:40,41c + # db.user=nacos + # db.password=nacos +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====3 +1:112c +2:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,210c + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + 
#nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..25aac30ad6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,163 @@ +====3 +1:25c +2:25c + 1.4.0-SNAPSHOT +3:25c + 1.4.1-SNAPSHOT +====3 +1:39c +2:39c + nacos-all-1.4.0-SNAPSHOT +3:39c + nacos-all-1.4.1-SNAPSHOT +====3 +1:129c +2:129c + 2.1.16.RELEASE +3:129c + 2.1.17.RELEASE +====3 +1:131c +2:131c + 2.6 +3:130a +====1 +1:133c + 2.2 +2:133c +3:132c + 2.6 +====3 +1:144c +2:144c + 1.7.17 +3:142a +====1 +1:170a +2:171,177c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:287,289c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:317c + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====3 +1:341a +2:351a +3:350c + /console-ui/** +====3 +1:553a +2:563a +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====3 +1:581a +2:591a +3:610c + sys +====3 +1:688a +2:698a +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====3 +1:712,717c +2:722,727c + + commons-lang + commons-lang + ${commons-lang.version} + + +3:745a +====3 +1:817,822c +2:827,832c + + com.ning + async-http-client + ${async-http-client.version} + + +3:844a +====1 +1:1027a +2:1038,1075c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git 
a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..f5fe26eb1d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_AsyncNotifyService.java.txt @@ -0,0 +1,83 @@ +====1 +1:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====1 +1:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +2:130c +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +====2 +1:135a +3:135a +2:136,143c + <<<<<<< HEAD + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ||||||| a41d209d5 + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ======= +==== +1:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:145,146c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); + >>>>>>> TEMP_RIGHT_BRANCH +3:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:166c +3:157c + +====1 +1:171c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +2:180c +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====1 +1:177c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +2:186c +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====1 +1:199c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +2:208c +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====1 +1:262c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +2:271c +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====1 +1:265c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +2:274c +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:321c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ConfigController.java.txt new file mode 100644 index 0000000000..c1619171c1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ConfigController.java.txt @@ -0,0 +1,193 @@ +====1 +1:18a +2:19c +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====1 +1:22a +2:24c +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +==== +1:34a +2:37,38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; + <<<<<<< HEAD +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; 
+====2 +1:35a +3:38a +2:40,43c + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.result.ResultBuilder; + ======= + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:45a +3:40a +====1 +1:45a +2:53c +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====1 +1:48c + import com.alibaba.nacos.core.utils.InetUtils; +2:56c +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +2:100a +3:95a +====1 +1:137a +2:144,147c +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====1 +1:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:188c +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:199c + tenant = processTenant(tenant); +2:209c +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:283c + return ResultBuilder.buildSuccessResult(true); +2:293c +3:288c + return RestResultUtils.success(true); +====1 +1:472c + tenant = processTenant(tenant); +2:482c +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:537c +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +2:540,543c +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:535a +2:545c +3:540c + +====1 +1:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +2:558c +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====1 +1:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +2:570c +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====1 +1:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +2:594c +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====1 +1:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:598c +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:611c +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +2:614c +3:609c + return RestResultUtils.success("导入成功", saveResult); +====1 +1:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +2:638c +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====1 +1:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +2:641,643c +3:636,638c + 
+ namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====1 +1:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +2:645c +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:659c +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:683c +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:696c +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +2:699c +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..edbfb2552d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_DiskUtils.java.txt @@ -0,0 +1,24 @@ +356,364d355 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, +< checksum); final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); +< final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ======= +368d358 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java +407,415d396 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, +< checksum); final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, checksum); +< final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ======= +419d399 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..465add68ba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,186 @@ +====1 +1:21c + import 
com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====1 +1:30a +2:31c +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:44c + +====2 +1:46c +3:47c + +2:47c + +====2 +1:49c +3:50c + +2:50c + +====2 +1:51c +3:52c + +2:52c + +====2 +1:53c +3:54c + +2:54c + +====2 +1:56c +3:57c + +2:57c + +====2 +1:58c +3:59c + +2:59c + +====2 +1:68c +3:69c + +2:69c + +====2 +1:70c +3:71c + +2:71c + +====2 +1:72c +3:73c + +2:73c + +====2 +1:74c +3:75c + +2:75c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:78,95c + <<<<<<< HEAD + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ||||||| a41d209d5 + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ======= + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + >>>>>>> TEMP_RIGHT_BRANCH +3:78,83c + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +====2 +1:82a +3:85a +2:98,104c + <<<<<<< HEAD + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ||||||| a41d209d5 + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ======= +==== +1:84c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:106,107c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + >>>>>>> TEMP_RIGHT_BRANCH +3:87c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:109c + +====2 +1:88c +3:91c + +2:111c + +====2 +1:96c +3:99c + +2:119c + +====2 +1:98c +3:101c + +2:121c + +====2 +1:106c +3:109c + +2:129c + +====2 +1:121c +3:124c + +2:144c + +====2 +1:137c +3:140c + +2:160c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:164c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:167c + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..ed6e037647 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,182 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import 
org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +==== +1:40a +2:20,45c + <<<<<<< HEAD + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + ||||||| a41d209d5 + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + ======= + import com.alibaba.nacos.common.utils.IPUtil; +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====1 +1:45c + import com.alibaba.nacos.core.utils.ApplicationUtils; +2:50c +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:52,80c + >>>>>>> TEMP_RIGHT_BRANCH + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + <<<<<<< HEAD + import java.util.regex.Matcher; + import java.util.regex.Pattern; + ||||||| a41d209d5 + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.ConfigExecutor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + ======= + import java.util.concurrent.TimeUnit; + >>>>>>> TEMP_RIGHT_BRANCH + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static 
com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====1 +1:55,58c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +2:88a +3:49a +====1 +1:86,87c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +2:115a +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:148,150c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====1 +1:130c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +2:158c +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====1 +1:193c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:221c +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:196c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:224c +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:204,217c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? defaultValue : value; + } + +2:231a +3:192a +====1 +1:272c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:286c +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====1 +1:275c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:289c +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..3c95b4eb48 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,215 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import 
com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:30a +3:33a +2:42,49c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + + ======= +====1 +1:32a +2:52,53c +3:36,37c + import java.util.Collections; + import java.util.List; +====2 +1:35a +3:40a +2:57c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:68c +3:51c + private PermissionsRepository permissionsRepository; +==== +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:71,140c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(role)) { + params = Collections.singletonList(role); + } else { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:151,152c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:163,167c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..b2b75323b7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,283 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import 
com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:31a +3:33a +2:42,48c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + ======= +==== +1:35a +2:53,54c + import java.util.Collections; + >>>>>>> TEMP_RIGHT_BRANCH +3:38c + import java.util.Collections; +====1 +1:36a +2:56c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:57a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:69c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,80c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +==== +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:85,135c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select 
role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + where = " 1=1 "; + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:141c +3:82c + * @param role role string value. +====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:146c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:155,156c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:162c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:166,168c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +==== +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:173,185c + <<<<<<< HEAD + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); + ||||||| a41d209d5 + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; + ======= + String sql = "SELECT role FROM roles WHERE role like '%' ? 
'%'"; + List users = this.jt.queryForList(sql, new String[] {role}, String.class); + return users; + >>>>>>> TEMP_RIGHT_BRANCH +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:187a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..44c1365407 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,3017 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import 
com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. + */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + 
configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? 
AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? 
AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? 
AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? 
AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) 
"; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? "); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? 
and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? 
"); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? "); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? 
and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? 
and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? )" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? 
)" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? )" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? 
) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? 
"; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; +2:1406,1434c + <<<<<<< HEAD + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + ||||||| a41d209d5 + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + ======= + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; +3:1406,1413c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) "; +====1 +1:2292c + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); +2:1437c +3:1416c + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); +====1 +1:2295c + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); +2:1440c +3:1419c + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); +====2 +1:2297a +3:1421a +2:1443c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1457c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1462,1467c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1472,1473c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? 
AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1478,1479c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1484,1485c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1539c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1556,1560c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1565c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1614,1617c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1619,1623c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1626c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? "); + paramList.add(appName); +2:1629c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1632,1640c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1731,1732c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_HistoryController.java.txt new file mode 100644 index 0000000000..b506cbd739 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_HistoryController.java.txt @@ -0,0 +1,120 @@ +====1 +1:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +2:30a +3:30a +====2 +1:42c +3:39c + +2:39c + +====2 +1:45c +3:42c + +2:42c + +====1 +1:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +2:46,50c +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. 
+====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:70c + +====1 +1:75c + * Query the detailed configuration history informations. +2:72,75c +3:72,75c + * Query the detailed configuration history information. + * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,86c + <<<<<<< HEAD + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ||||||| a41d209d5 + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ======= + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { + >>>>>>> TEMP_RIGHT_BRANCH +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +====2 +1:81a +3:80a +2:89,93c + <<<<<<< HEAD + + ||||||| a41d209d5 + + ======= +==== +1:82a +2:95,107c + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:82,93c + /** + * Query previous config history information. 
+ * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..1d2e6e10d3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_MergeDatumService.java.txt @@ -0,0 +1,43 @@ +====1 +1:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:30,32c +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +2:110c +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====1 +1:117c + if (ApplicationUtils.getStandaloneMode()) { +2:118c +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); +2:167,177c + <<<<<<< HEAD + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); + ||||||| a41d209d5 + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); + ======= + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +3:167c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +====2 +1:168a +3:169a +2:180c + >>>>>>> TEMP_RIGHT_BRANCH diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..5df60ac501 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,47 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:31c + import com.alibaba.nacos.core.utils.InetUtils; +2:39c +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:55c + public boolean process(AbstractDelayTask task) { +2:63c +3:56c + public boolean process(NacosTask task) { +====1 +1:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:95c +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====1 +1:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), 
+2:109c +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..9dcd6442e3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,67 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:49c + public boolean process(AbstractDelayTask task) { +2:57c +3:50c + public boolean process(NacosTask task) { +====1 +1:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:84c +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +2:86c +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====1 +1:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:90c +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:100c +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:108c +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_application.properties.txt new file mode 100644 index 0000000000..d78b420770 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_application.properties.txt @@ -0,0 +1,89 @@ +====1 +1:40,41c + # db.user=nacos + # db.password=nacos +2:40,41c +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====1 +1:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +2:112c +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,214c + + <<<<<<< HEAD + + #nacos.datasource.type=MYSQL + # + 
#nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + + ||||||| a41d209d5 + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + 
#nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..99b5611a8e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,169 @@ +====1 +1:25c + 1.4.0-SNAPSHOT +2:25c +3:25c + 1.4.1-SNAPSHOT +====1 +1:39c + nacos-all-1.4.0-SNAPSHOT +2:39c +3:39c + nacos-all-1.4.1-SNAPSHOT +====1 +1:129c + 2.1.16.RELEASE +2:129c +3:129c + 2.1.17.RELEASE +====1 +1:131c + 2.6 +2:130a +3:130a +====1 +1:133c + 2.2 +2:132c +3:132c + 2.6 +====1 +1:144c + 1.7.17 +2:142a +3:142a +====1 +1:170a +2:169,175c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:285,287c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:315,321c + <<<<<<< HEAD + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** + ||||||| a41d209d5 + **/istio/model/**,**/nacos/test/** + ======= + **/istio/model/**,**/consistency/entity/**,**/nacos/test/** + >>>>>>> TEMP_RIGHT_BRANCH +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====1 +1:341a +2:356c +3:350c + /console-ui/** +====1 +1:553a +2:569,587c +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====1 +1:581a +2:616c +3:610c + sys +====1 +1:688a +2:724,728c +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====1 +1:712,717c + + commons-lang + commons-lang + ${commons-lang.version} + + +2:751a +3:745a +====1 +1:817,822c + + com.ning + async-http-client + ${async-http-client.version} + + +2:850a +3:844a +====1 +1:1027a +2:1056,1093c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..b0d839171d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_AsyncNotifyService.java.txt @@ -0,0 +1,74 @@ +====3 +1:35,36c +2:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====3 +1:130c +2:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +==== +1:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + 
header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:136,138c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:158c +3:157c + +====3 +1:171c +2:172c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====3 +1:177c +2:178c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====3 +1:199c +2:200c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====3 +1:262c +2:263c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====3 +1:265c +2:266c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:313c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ConfigController.java.txt new file mode 100644 index 0000000000..b8fee64e3d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ConfigController.java.txt @@ -0,0 +1,183 @@ +====3 +1:18a +2:18a +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====3 +1:22a +2:22a +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +====1 +1:34a +2:35c +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:38a +3:40a +====3 +1:45a +2:45a +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====3 +1:48c +2:48c + import com.alibaba.nacos.core.utils.InetUtils; +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:93,94c +2:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +3:95a +====3 +1:137a +2:137a +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====3 +1:178c +2:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:199c +2:199c + tenant = processTenant(tenant); +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:283c +2:283c + return ResultBuilder.buildSuccessResult(true); +3:288c + return RestResultUtils.success(true); +====3 +1:472c +2:472c + tenant = processTenant(tenant); +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:527c +2:527c + return 
ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:530,534c +2:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:535a +2:535a +3:540c + +====3 +1:548c +2:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====3 +1:560c +2:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====3 +1:584c +2:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====3 +1:588c +2:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:601c +2:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:604c +2:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +3:609c + return RestResultUtils.success("导入成功", saveResult); +====3 +1:628c +2:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====3 +1:631,634c +2:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====3 +1:636c +2:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:650c +2:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:674c +2:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:687c +2:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:690,697c +2:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git 
a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..c5d1158c4b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,163 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====3 +1:30a +2:30a +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:43c + +====2 +1:46c +3:47c + +2:46c + +====2 +1:49c +3:50c + +2:49c + +====2 +1:51c +3:52c + +2:51c + +====2 +1:53c +3:54c + +2:53c + +====2 +1:56c +3:57c + +2:56c + +====2 +1:58c +3:59c + +2:58c + +====2 +1:68c +3:69c + +2:68c + +====2 +1:70c +3:71c + +2:70c + +====2 +1:72c +3:73c + +2:72c + +====2 +1:74c +3:75c + +2:74c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +3:78,83c + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +==== +1:83,84c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:83,84c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +3:86,87c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:86c + +====2 +1:88c +3:91c + +2:88c + +====2 +1:96c +3:99c + +2:96c + +====2 +1:98c +3:101c + +2:98c + +====2 +1:106c +3:109c + +2:106c + +====2 +1:121c +3:124c + +2:121c + +====2 +1:137c +3:140c + +2:137c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:141c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:144c + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..0492051512 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,150 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + 
import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +====3 +1:40a +2:19a +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====2 +1:43c +3:23c + import com.alibaba.nacos.config.server.utils.ConfigExecutor; +2:21a +====3 +1:45c +2:23c + import com.alibaba.nacos.core.utils.ApplicationUtils; +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:25,42c + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====3 +1:55,58c +2:51,54c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +3:49a +====3 +1:86,87c +2:82,83c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:116,118c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====3 +1:130c +2:126c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====3 +1:193c +2:189c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:196c +2:192c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:204,217c 
+2:200,213c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? defaultValue : value; + } + +3:192a +====3 +1:272c +2:268c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====3 +1:275c +2:271c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..5424bf2de0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,128 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:31,35c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +2:29a +3:34,40c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + import java.util.Collections; + import java.util.List; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:40c +3:51c + private PermissionsRepository permissionsRepository; +====1 +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page 
pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:43,50c +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:61,62c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,77c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..17328923af --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,204 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import 
org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:32,35c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; +2:29a +3:34,38c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + import java.util.Collections; +====1 +1:36a +2:31c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:32a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:44c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:48,55c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:60,67c +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:73c +3:82c + * @param role role string value. 
+====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:78c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:87,88c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:94c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:98,100c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +====1 +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:105,107c +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:109a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..d4cc7360ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,2999 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import 
com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. 
+ */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, 
configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? 
StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? 
limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? 
"); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? 
"); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? "); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? 
"); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? 
"); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT 
group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? 
)" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? 
)" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? ) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? 
"); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289,2300c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override +2:1406,1411c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override +3:1406,1424c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); + throw e; + } + } + + @Override +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1414,1422c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1427,1432c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1437,1438c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1443,1444c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? 
AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1450c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1504c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1521,1525c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1530c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1579,1582c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1584,1588c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1591c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? 
"); + paramList.add(appName); +2:1594c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1597,1605c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1696,1697c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_HistoryController.java.txt new file mode 100644 index 0000000000..f20395adc2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_HistoryController.java.txt @@ -0,0 +1,94 @@ +====3 +1:31,33c +2:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +3:30a +====2 +1:42c +3:39c + +2:42c + +====2 +1:45c +3:42c + +2:45c + +====3 +1:49,53c +2:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:60,66c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:73c + +====3 +1:75c +2:75c + * Query the detailed configuration history informations. +3:72,75c + * Query the detailed configuration history information. 
+ * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +==== +1:82c + +2:82c + +3:81,93c + + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..154063ccc5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_MergeDatumService.java.txt @@ -0,0 +1,35 @@ +====3 +1:30,31c +2:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:109c +2:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====3 +1:117c +2:117c + if (ApplicationUtils.getStandaloneMode()) { +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166,168c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +2:166,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +3:167,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..f8190d8c70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,38 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:31c +2:31c + import com.alibaba.nacos.core.utils.InetUtils; +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:55c +2:55c + public boolean process(AbstractDelayTask task) { +3:56c + public boolean process(NacosTask task) { +====3 +1:87c +2:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====3 +1:101c +2:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..b34849e3c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,58 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:27,28c +2:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:49c +2:49c + public boolean process(AbstractDelayTask task) { +3:50c + public boolean process(NacosTask task) { +====3 +1:76c +2:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====3 +1:78c +2:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====3 +1:82c +2:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:92c +2:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:100c +2:100c + 
ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_application.properties.txt new file mode 100644 index 0000000000..9395a60678 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_application.properties.txt @@ -0,0 +1,85 @@ +====3 +1:40,41c +2:40,41c + # db.user=nacos + # db.password=nacos +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====3 +1:112c +2:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,210c + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + 
#nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..25aac30ad6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,163 @@ +====3 +1:25c +2:25c + 1.4.0-SNAPSHOT +3:25c + 1.4.1-SNAPSHOT +====3 +1:39c +2:39c + nacos-all-1.4.0-SNAPSHOT +3:39c + nacos-all-1.4.1-SNAPSHOT +====3 +1:129c +2:129c + 2.1.16.RELEASE +3:129c + 2.1.17.RELEASE +====3 +1:131c +2:131c + 2.6 +3:130a +====1 +1:133c + 2.2 +2:133c +3:132c + 2.6 +====3 +1:144c +2:144c + 1.7.17 +3:142a +====1 +1:170a +2:171,177c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:287,289c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:317c + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====3 +1:341a +2:351a +3:350c + /console-ui/** +====3 +1:553a +2:563a +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====3 +1:581a +2:591a +3:610c + sys +====3 +1:688a +2:698a +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====3 +1:712,717c +2:722,727c + + commons-lang + commons-lang + ${commons-lang.version} + + +3:745a +====3 +1:817,822c +2:827,832c + + com.ning + async-http-client + ${async-http-client.version} + + +3:844a +====1 +1:1027a +2:1038,1075c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + 
${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..f5fe26eb1d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_AsyncNotifyService.java.txt @@ -0,0 +1,83 @@ +====1 +1:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====1 +1:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +2:130c +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +====2 +1:135a +3:135a +2:136,143c + <<<<<<< HEAD + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ||||||| a41d209d5 + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ======= +==== +1:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:145,146c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); + >>>>>>> TEMP_RIGHT_BRANCH +3:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:166c +3:157c + +====1 +1:171c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +2:180c +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====1 +1:177c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +2:186c +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====1 +1:199c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +2:208c +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====1 +1:262c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +2:271c +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====1 +1:265c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +2:274c +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:321c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ConfigController.java.txt new file mode 100644 index 0000000000..aff019c609 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ConfigController.java.txt @@ -0,0 +1,200 @@ +====1 +1:18a +2:19c +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====1 +1:22a +2:24c +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +==== +1:34a +2:37,38c + import 
com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; + <<<<<<< HEAD +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====2 +1:35a +3:38a +2:40,43c + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.result.ResultBuilder; + ======= + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:45a +3:40a +====1 +1:45a +2:53c +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====1 +1:48c + import com.alibaba.nacos.core.utils.InetUtils; +2:56c +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +2:100a +3:95a +====1 +1:137a +2:144,147c +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====1 +1:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:188c +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:199c + tenant = processTenant(tenant); +2:209c +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:283c + return ResultBuilder.buildSuccessResult(true); +2:293c +3:288c + return RestResultUtils.success(true); +====1 +1:472c + tenant = processTenant(tenant); +2:482c +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:537c +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +==== +1:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +2:540,543c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:535a +2:545c +3:540c + +====1 +1:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +2:558c +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====1 +1:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +2:570c +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====1 +1:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +2:594c +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====1 +1:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:598c +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:611c +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +2:614c 
+3:609c + return RestResultUtils.success("导入成功", saveResult); +====1 +1:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +2:638c +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +==== +1:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +2:641,643c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====1 +1:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +2:645c +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:659c +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:683c +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:696c +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +2:699c +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..edbfb2552d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_DiskUtils.java.txt @@ -0,0 +1,24 @@ +356,364d355 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, +< checksum); final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); +< final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ======= +368d358 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java +407,415d396 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, +< checksum); final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, 
checksum); +< final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ======= +419d399 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..ed6e037647 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,182 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +==== +1:40a +2:20,45c + <<<<<<< HEAD + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + ||||||| a41d209d5 + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + ======= + import com.alibaba.nacos.common.utils.IPUtil; +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====1 +1:45c + import com.alibaba.nacos.core.utils.ApplicationUtils; +2:50c +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:52,80c + >>>>>>> TEMP_RIGHT_BRANCH + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + <<<<<<< HEAD + import java.util.regex.Matcher; + import java.util.regex.Pattern; + ||||||| a41d209d5 + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import 
com.alibaba.nacos.config.server.utils.ConfigExecutor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + ======= + import java.util.concurrent.TimeUnit; + >>>>>>> TEMP_RIGHT_BRANCH + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====1 +1:55,58c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +2:88a +3:49a +====1 +1:86,87c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +2:115a +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:148,150c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====1 +1:130c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +2:158c +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====1 +1:193c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:221c +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:196c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:224c +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:204,217c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? 
defaultValue : value; + } + +2:231a +3:192a +====1 +1:272c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:286c +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====1 +1:275c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:289c +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..3c95b4eb48 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,215 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:30a +3:33a +2:42,49c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + + ======= +====1 +1:32a +2:52,53c +3:36,37c + import java.util.Collections; + import java.util.List; +====2 +1:35a +3:40a +2:57c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:68c +3:51c + private PermissionsRepository permissionsRepository; +==== +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + 
pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:71,140c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(role)) { + params = Collections.singletonList(role); + } else { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:151,152c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? 
and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:163,167c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..b2b75323b7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,283 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:31a +3:33a +2:42,48c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + ======= +==== +1:35a +2:53,54c + import java.util.Collections; + >>>>>>> TEMP_RIGHT_BRANCH +3:38c + import java.util.Collections; +====1 +1:36a +2:56c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:57a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:69c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, 
sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,80c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +==== +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:85,135c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + where = " 1=1 "; + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:141c +3:82c + * @param role role string value. +====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:146c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:155,156c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:162c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:166,168c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +==== +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:173,185c + <<<<<<< HEAD + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); + ||||||| a41d209d5 + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; + ======= + String sql = "SELECT role FROM roles WHERE role like '%' ? 
'%'"; + List users = this.jt.queryForList(sql, new String[] {role}, String.class); + return users; + >>>>>>> TEMP_RIGHT_BRANCH +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:187a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..44c1365407 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,3017 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import 
com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. + */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + 
configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? 
AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? 
AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? 
AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? 
AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) 
"; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? "); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? 
and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? 
"); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? "); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? 
and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? 
and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? )" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? 
)" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? )" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? 
) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? 
"; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; +2:1406,1434c + <<<<<<< HEAD + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + ||||||| a41d209d5 + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + ======= + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; +3:1406,1413c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) "; +====1 +1:2292c + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); +2:1437c +3:1416c + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); +====1 +1:2295c + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); +2:1440c +3:1419c + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); +====2 +1:2297a +3:1421a +2:1443c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1457c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1462,1467c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1472,1473c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? 
AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1478,1479c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1484,1485c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1539c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1556,1560c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1565c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1614,1617c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1619,1623c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1626c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? "); + paramList.add(appName); +2:1629c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1632,1640c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1731,1732c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..0fe966b505 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_MergeDatumService.java.txt @@ -0,0 +1,43 @@ +====1 +1:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:30,32c +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +2:110c +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====1 +1:117c + if (ApplicationUtils.getStandaloneMode()) { +2:118c +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); +2:167,175c + <<<<<<< HEAD + persistService.removeConfigInfo(dataId, group, tenant, 
InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + ||||||| a41d209d5 + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + ======= + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +3:167c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +====2 +1:168c +3:169c + + group); +2:177,178c + >>>>>>> TEMP_RIGHT_BRANCH + + group); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..5df60ac501 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,47 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:31c + import com.alibaba.nacos.core.utils.InetUtils; +2:39c +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:55c + public boolean process(AbstractDelayTask task) { +2:63c +3:56c + public boolean process(NacosTask task) { +====1 +1:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:95c +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====1 +1:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:109c +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..9dcd6442e3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,67 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:49c + public boolean process(AbstractDelayTask task) { +2:57c +3:50c + public boolean 
process(NacosTask task) { +====1 +1:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:84c +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +2:86c +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====1 +1:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:90c +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:100c +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:108c +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_application.properties.txt new file mode 100644 index 0000000000..d78b420770 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_application.properties.txt @@ -0,0 +1,89 @@ +====1 +1:40,41c + # db.user=nacos + # db.password=nacos +2:40,41c +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====1 +1:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +2:112c +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,214c + + <<<<<<< HEAD + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + 
#nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + + ||||||| a41d209d5 + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..99b5611a8e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,169 @@ +====1 +1:25c + 1.4.0-SNAPSHOT +2:25c +3:25c + 1.4.1-SNAPSHOT +====1 +1:39c + nacos-all-1.4.0-SNAPSHOT +2:39c +3:39c + nacos-all-1.4.1-SNAPSHOT +====1 +1:129c + 2.1.16.RELEASE +2:129c +3:129c + 2.1.17.RELEASE +====1 +1:131c + 2.6 +2:130a +3:130a +====1 +1:133c + 2.2 +2:132c +3:132c + 2.6 +====1 +1:144c + 1.7.17 +2:142a +3:142a +====1 +1:170a +2:169,175c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:285,287c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:315,321c + <<<<<<< HEAD + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** + ||||||| a41d209d5 + **/istio/model/**,**/nacos/test/** + 
======= + **/istio/model/**,**/consistency/entity/**,**/nacos/test/** + >>>>>>> TEMP_RIGHT_BRANCH +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====1 +1:341a +2:356c +3:350c + /console-ui/** +====1 +1:553a +2:569,587c +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====1 +1:581a +2:616c +3:610c + sys +====1 +1:688a +2:724,728c +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====1 +1:712,717c + + commons-lang + commons-lang + ${commons-lang.version} + + +2:751a +3:745a +====1 +1:817,822c + + com.ning + async-http-client + ${async-http-client.version} + + +2:850a +3:844a +====1 +1:1027a +2:1056,1093c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..b0d839171d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_AsyncNotifyService.java.txt @@ -0,0 +1,74 @@ +====3 +1:35,36c +2:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====3 +1:130c +2:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +==== +1:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:136,138c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:158c +3:157c + +====3 +1:171c +2:172c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====3 +1:177c +2:178c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====3 +1:199c +2:200c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====3 +1:262c +2:263c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, 
EnvUtil.getContextPath(), dataId, group); +====3 +1:265c +2:266c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:313c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ConfigController.java.txt new file mode 100644 index 0000000000..b8fee64e3d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ConfigController.java.txt @@ -0,0 +1,183 @@ +====3 +1:18a +2:18a +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====3 +1:22a +2:22a +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +====1 +1:34a +2:35c +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:38a +3:40a +====3 +1:45a +2:45a +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====3 +1:48c +2:48c + import com.alibaba.nacos.core.utils.InetUtils; +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:93,94c +2:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +3:95a +====3 +1:137a +2:137a +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====3 +1:178c +2:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:199c +2:199c + tenant = processTenant(tenant); +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:283c +2:283c + return ResultBuilder.buildSuccessResult(true); +3:288c + return RestResultUtils.success(true); +====3 +1:472c +2:472c + tenant = processTenant(tenant); +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:527c +2:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:530,534c +2:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:535a +2:535a +3:540c + +====3 +1:548c +2:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====3 +1:560c +2:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====3 +1:584c +2:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====3 +1:588c +2:588c + return 
ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:601c +2:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:604c +2:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +3:609c + return RestResultUtils.success("导入成功", saveResult); +====3 +1:628c +2:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====3 +1:631,634c +2:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====3 +1:636c +2:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:650c +2:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:674c +2:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:687c +2:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:690,697c +2:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..c5d1158c4b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,163 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====3 +1:30a +2:30a +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:43c + +====2 +1:46c +3:47c + +2:46c + +====2 +1:49c +3:50c + +2:49c + +====2 +1:51c +3:52c + +2:51c + +====2 +1:53c +3:54c + +2:53c + +====2 +1:56c +3:57c + +2:56c + +====2 +1:58c +3:59c + +2:58c + +====2 +1:68c +3:69c + +2:68c + +====2 +1:70c +3:71c + +2:70c + +====2 +1:72c +3:73c + +2:72c + +====2 +1:74c +3:75c + +2:74c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) 
{ +2:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +3:78,83c + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +==== +1:83,84c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:83,84c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +3:86,87c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:86c + +====2 +1:88c +3:91c + +2:88c + +====2 +1:96c +3:99c + +2:96c + +====2 +1:98c +3:101c + +2:98c + +====2 +1:106c +3:109c + +2:106c + +====2 +1:121c +3:124c + +2:121c + +====2 +1:137c +3:140c + +2:137c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:141c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:144c + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..0492051512 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,150 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +====3 +1:40a +2:19a +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====2 +1:43c +3:23c + import com.alibaba.nacos.config.server.utils.ConfigExecutor; +2:21a +====3 +1:45c +2:23c + import com.alibaba.nacos.core.utils.ApplicationUtils; +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:25,42c + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static 
com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====3 +1:55,58c +2:51,54c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +3:49a +====3 +1:86,87c +2:82,83c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:116,118c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====3 +1:130c +2:126c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====3 +1:193c +2:189c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:196c +2:192c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:204,217c +2:200,213c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? 
defaultValue : value; + } + +3:192a +====3 +1:272c +2:268c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====3 +1:275c +2:271c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..5424bf2de0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,128 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:31,35c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +2:29a +3:34,40c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + import java.util.Collections; + import java.util.List; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:40c +3:51c + private PermissionsRepository permissionsRepository; +====1 +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + 
pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:43,50c +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:61,62c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,77c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..17328923af --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,204 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:32,35c + import javax.annotation.PostConstruct; + 
import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; +2:29a +3:34,38c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + import java.util.Collections; +====1 +1:36a +2:31c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:32a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:44c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:48,55c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:60,67c +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:73c +3:82c + * @param role role string value. 
+====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:78c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:87,88c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:94c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:98,100c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +====1 +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:105,107c +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:109a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..d4cc7360ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,2999 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import 
com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. 
+ */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, 
configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? 
StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? 
limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? 
"); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? 
"); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? "); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? 
"); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? 
"); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT 
group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? 
)" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? 
)" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? ) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? 
"); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289,2300c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override +2:1406,1411c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override +3:1406,1424c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); + throw e; + } + } + + @Override +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1414,1422c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1427,1432c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1437,1438c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1443,1444c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? 
AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1450c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1504c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1521,1525c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1530c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1579,1582c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1584,1588c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1591c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? 
"); + paramList.add(appName); +2:1594c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1597,1605c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1696,1697c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_HistoryController.java.txt new file mode 100644 index 0000000000..f20395adc2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_HistoryController.java.txt @@ -0,0 +1,94 @@ +====3 +1:31,33c +2:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +3:30a +====2 +1:42c +3:39c + +2:42c + +====2 +1:45c +3:42c + +2:45c + +====3 +1:49,53c +2:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:60,66c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:73c + +====3 +1:75c +2:75c + * Query the detailed configuration history informations. +3:72,75c + * Query the detailed configuration history information. 
+ * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +==== +1:82c + +2:82c + +3:81,93c + + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..154063ccc5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_MergeDatumService.java.txt @@ -0,0 +1,35 @@ +====3 +1:30,31c +2:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:109c +2:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====3 +1:117c +2:117c + if (ApplicationUtils.getStandaloneMode()) { +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166,168c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +2:166,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +3:167,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..f8190d8c70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,38 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:31c +2:31c + import com.alibaba.nacos.core.utils.InetUtils; +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:55c +2:55c + public boolean process(AbstractDelayTask task) { +3:56c + public boolean process(NacosTask task) { +====3 +1:87c +2:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====3 +1:101c +2:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..b34849e3c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,58 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:27,28c +2:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:49c +2:49c + public boolean process(AbstractDelayTask task) { +3:50c + public boolean process(NacosTask task) { +====3 +1:76c +2:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====3 +1:78c +2:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====3 +1:82c +2:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:92c +2:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:100c +2:100c + 
ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_application.properties.txt new file mode 100644 index 0000000000..9395a60678 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_application.properties.txt @@ -0,0 +1,85 @@ +====3 +1:40,41c +2:40,41c + # db.user=nacos + # db.password=nacos +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====3 +1:112c +2:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,210c + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + 
#nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..25aac30ad6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,163 @@ +====3 +1:25c +2:25c + 1.4.0-SNAPSHOT +3:25c + 1.4.1-SNAPSHOT +====3 +1:39c +2:39c + nacos-all-1.4.0-SNAPSHOT +3:39c + nacos-all-1.4.1-SNAPSHOT +====3 +1:129c +2:129c + 2.1.16.RELEASE +3:129c + 2.1.17.RELEASE +====3 +1:131c +2:131c + 2.6 +3:130a +====1 +1:133c + 2.2 +2:133c +3:132c + 2.6 +====3 +1:144c +2:144c + 1.7.17 +3:142a +====1 +1:170a +2:171,177c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:287,289c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:317c + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====3 +1:341a +2:351a +3:350c + /console-ui/** +====3 +1:553a +2:563a +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====3 +1:581a +2:591a +3:610c + sys +====3 +1:688a +2:698a +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====3 +1:712,717c +2:722,727c + + commons-lang + commons-lang + ${commons-lang.version} + + +3:745a +====3 +1:817,822c +2:827,832c + + com.ning + async-http-client + ${async-http-client.version} + + +3:844a +====1 +1:1027a +2:1038,1075c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + 
${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..b0d839171d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_AsyncNotifyService.java.txt @@ -0,0 +1,74 @@ +====3 +1:35,36c +2:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====3 +1:130c +2:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +==== +1:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:136,138c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:158c +3:157c + +====3 +1:171c +2:172c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====3 +1:177c +2:178c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====3 +1:199c +2:200c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====3 +1:262c +2:263c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====3 +1:265c +2:266c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:313c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ConfigController.java.txt new file mode 100644 index 0000000000..b8fee64e3d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ConfigController.java.txt @@ -0,0 +1,183 @@ +====3 +1:18a +2:18a +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====3 +1:22a +2:22a +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +====1 +1:34a +2:35c +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:38a +3:40a 
+====3 +1:45a +2:45a +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====3 +1:48c +2:48c + import com.alibaba.nacos.core.utils.InetUtils; +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:93,94c +2:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +3:95a +====3 +1:137a +2:137a +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====3 +1:178c +2:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:199c +2:199c + tenant = processTenant(tenant); +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:283c +2:283c + return ResultBuilder.buildSuccessResult(true); +3:288c + return RestResultUtils.success(true); +====3 +1:472c +2:472c + tenant = processTenant(tenant); +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:527c +2:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:530,534c +2:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:535a +2:535a +3:540c + +====3 +1:548c +2:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====3 +1:560c +2:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====3 +1:584c +2:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====3 +1:588c +2:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:601c +2:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:604c +2:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +3:609c + return RestResultUtils.success("导入成功", saveResult); +====3 +1:628c +2:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====3 +1:631,634c +2:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====3 +1:636c +2:636c + return 
ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:650c +2:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:674c +2:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:687c +2:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:690,697c +2:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..0492051512 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,150 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +====3 +1:40a +2:19a +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====2 +1:43c +3:23c + import com.alibaba.nacos.config.server.utils.ConfigExecutor; +2:21a +====3 +1:45c +2:23c + import com.alibaba.nacos.core.utils.ApplicationUtils; +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:25,42c + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + 
import java.util.regex.Pattern; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====3 +1:55,58c +2:51,54c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +3:49a +====3 +1:86,87c +2:82,83c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:116,118c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====3 +1:130c +2:126c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====3 +1:193c +2:189c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:196c +2:192c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:204,217c +2:200,213c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? 
defaultValue : value; + } + +3:192a +====3 +1:272c +2:268c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====3 +1:275c +2:271c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..5424bf2de0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,128 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:31,35c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +2:29a +3:34,40c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + import java.util.Collections; + import java.util.List; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:40c +3:51c + private PermissionsRepository permissionsRepository; +====1 +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + 
pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:43,50c +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:61,62c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,77c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..17328923af --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,204 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:27c +3:31c + import 
org.springframework.data.domain.PageRequest; +==== +1:32,35c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; +2:29a +3:34,38c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + import java.util.Collections; +====1 +1:36a +2:31c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:32a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:44c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:48,55c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:60,67c +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:73c +3:82c + * @param role role string value. 
+====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:78c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:87,88c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:94c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:98,100c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +====1 +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:105,107c +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:109a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..d4cc7360ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,2999 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import 
com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. 
+ */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, 
configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? 
StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? 
limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? 
"); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? 
"); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? "); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? 
"); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? 
"); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT 
group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? 
)" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? 
)" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? ) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? 
"); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289,2300c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override +2:1406,1411c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override +3:1406,1424c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); + throw e; + } + } + + @Override +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1414,1422c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1427,1432c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1437,1438c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1443,1444c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? 
AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1450c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1504c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1521,1525c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1530c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1579,1582c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1584,1588c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1591c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? 
"); + paramList.add(appName); +2:1594c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1597,1605c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1696,1697c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..154063ccc5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_MergeDatumService.java.txt @@ -0,0 +1,35 @@ +====3 +1:30,31c +2:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:109c +2:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====3 +1:117c +2:117c + if (ApplicationUtils.getStandaloneMode()) { +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166,168c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +2:166,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +3:167,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..f8190d8c70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,38 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:31c +2:31c + import com.alibaba.nacos.core.utils.InetUtils; +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:55c +2:55c + public boolean process(AbstractDelayTask task) { +3:56c + public boolean process(NacosTask task) { +====3 +1:87c +2:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====3 +1:101c +2:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..b34849e3c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,58 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:27,28c +2:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:49c +2:49c + public boolean process(AbstractDelayTask task) { +3:50c + public boolean process(NacosTask task) { +====3 +1:76c +2:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====3 +1:78c +2:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====3 +1:82c +2:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:92c +2:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, 
lastModified, InetUtils.getSelfIP(), +====3 +1:100c +2:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_application.properties.txt new file mode 100644 index 0000000000..9395a60678 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_application.properties.txt @@ -0,0 +1,85 @@ +====3 +1:40,41c +2:40,41c + # db.user=nacos + # db.password=nacos +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====3 +1:112c +2:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,210c + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + 
#nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..25aac30ad6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,163 @@ +====3 +1:25c +2:25c + 1.4.0-SNAPSHOT +3:25c + 1.4.1-SNAPSHOT +====3 +1:39c +2:39c + nacos-all-1.4.0-SNAPSHOT +3:39c + nacos-all-1.4.1-SNAPSHOT +====3 +1:129c +2:129c + 2.1.16.RELEASE +3:129c + 2.1.17.RELEASE +====3 +1:131c +2:131c + 2.6 +3:130a +====1 +1:133c + 2.2 +2:133c +3:132c + 2.6 +====3 +1:144c +2:144c + 1.7.17 +3:142a +====1 +1:170a +2:171,177c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:287,289c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:317c + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====3 +1:341a +2:351a +3:350c + /console-ui/** +====3 +1:553a +2:563a +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====3 +1:581a +2:591a +3:610c + sys +====3 +1:688a +2:698a +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====3 +1:712,717c +2:722,727c + + commons-lang + commons-lang + ${commons-lang.version} + + +3:745a +====3 +1:817,822c +2:827,832c + + com.ning + async-http-client + ${async-http-client.version} + + +3:844a +====1 +1:1027a +2:1038,1075c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok 
+ lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..f5fe26eb1d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_AsyncNotifyService.java.txt @@ -0,0 +1,83 @@ +====1 +1:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====1 +1:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +2:130c +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +====2 +1:135a +3:135a +2:136,143c + <<<<<<< HEAD + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ||||||| a41d209d5 + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ======= +==== +1:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:145,146c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); + >>>>>>> TEMP_RIGHT_BRANCH +3:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:166c +3:157c + +====1 +1:171c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +2:180c +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====1 +1:177c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +2:186c +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====1 +1:199c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +2:208c +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====1 +1:262c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +2:271c +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====1 +1:265c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +2:274c +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:321c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ConfigController.java.txt new file mode 100644 index 0000000000..c1619171c1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ConfigController.java.txt @@ -0,0 +1,193 @@ +====1 +1:18a +2:19c +3:19c + import 
com.alibaba.nacos.api.config.ConfigType; +====1 +1:22a +2:24c +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +==== +1:34a +2:37,38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; + <<<<<<< HEAD +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====2 +1:35a +3:38a +2:40,43c + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.result.ResultBuilder; + ======= + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:45a +3:40a +====1 +1:45a +2:53c +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====1 +1:48c + import com.alibaba.nacos.core.utils.InetUtils; +2:56c +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +2:100a +3:95a +====1 +1:137a +2:144,147c +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====1 +1:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:188c +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:199c + tenant = processTenant(tenant); +2:209c +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:283c + return ResultBuilder.buildSuccessResult(true); +2:293c +3:288c + return RestResultUtils.success(true); +====1 +1:472c + tenant = processTenant(tenant); +2:482c +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:537c +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +2:540,543c +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:535a +2:545c +3:540c + +====1 +1:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +2:558c +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====1 +1:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +2:570c +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====1 +1:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +2:594c +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====1 +1:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:598c +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:611c +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +2:614c +3:609c + return RestResultUtils.success("导入成功", saveResult); +====1 +1:628c + return 
ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +2:638c +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====1 +1:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +2:641,643c +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====1 +1:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +2:645c +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:659c +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:683c +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:696c +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +2:699c +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..edbfb2552d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_DiskUtils.java.txt @@ -0,0 +1,24 @@ +356,364d355 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, +< checksum); final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); +< final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ======= +368d358 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java +407,415d396 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, +< checksum); final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, checksum); +< final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ======= +419d399 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java diff --git 
a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..465add68ba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,186 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====1 +1:30a +2:31c +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:44c + +====2 +1:46c +3:47c + +2:47c + +====2 +1:49c +3:50c + +2:50c + +====2 +1:51c +3:52c + +2:52c + +====2 +1:53c +3:54c + +2:54c + +====2 +1:56c +3:57c + +2:57c + +====2 +1:58c +3:59c + +2:59c + +====2 +1:68c +3:69c + +2:69c + +====2 +1:70c +3:71c + +2:71c + +====2 +1:72c +3:73c + +2:73c + +====2 +1:74c +3:75c + +2:75c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:78,95c + <<<<<<< HEAD + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ||||||| a41d209d5 + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ======= + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + >>>>>>> TEMP_RIGHT_BRANCH +3:78,83c + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +====2 +1:82a +3:85a +2:98,104c + <<<<<<< HEAD + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ||||||| a41d209d5 + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ======= +==== +1:84c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:106,107c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + >>>>>>> TEMP_RIGHT_BRANCH +3:87c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:109c + +====2 +1:88c +3:91c + +2:111c + +====2 +1:96c +3:99c + +2:119c + +====2 +1:98c +3:101c + +2:121c + +====2 +1:106c +3:109c + +2:129c + +====2 +1:121c +3:124c + +2:144c + +====2 +1:137c +3:140c + +2:160c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:164c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:167c + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..ed6e037647 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,182 @@ +====1 +1:19,39c + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +==== +1:40a +2:20,45c + <<<<<<< HEAD + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + ||||||| a41d209d5 + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + ======= + import com.alibaba.nacos.common.utils.IPUtil; +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====1 +1:45c + import com.alibaba.nacos.core.utils.ApplicationUtils; +2:50c +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:52,80c + >>>>>>> TEMP_RIGHT_BRANCH + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + <<<<<<< HEAD + import java.util.regex.Matcher; + import java.util.regex.Pattern; + ||||||| a41d209d5 + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.ConfigExecutor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + ======= + import java.util.concurrent.TimeUnit; + >>>>>>> TEMP_RIGHT_BRANCH + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import 
org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====1 +1:55,58c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +2:88a +3:49a +====1 +1:86,87c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +2:115a +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:148,150c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====1 +1:130c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +2:158c +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====1 +1:193c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:221c +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:196c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:224c +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:204,217c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? 
defaultValue : value; + } + +2:231a +3:192a +====1 +1:272c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:286c +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====1 +1:275c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:289c +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..3c95b4eb48 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,215 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:30a +3:33a +2:42,49c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + + ======= +====1 +1:32a +2:52,53c +3:36,37c + import java.util.Collections; + import java.util.List; +====2 +1:35a +3:40a +2:57c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:68c +3:51c + private PermissionsRepository permissionsRepository; +==== +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + 
pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:71,140c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(role)) { + params = Collections.singletonList(role); + } else { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:151,152c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? 
and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:163,167c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..b2b75323b7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,283 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:31a +3:33a +2:42,48c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + ======= +==== +1:35a +2:53,54c + import java.util.Collections; + >>>>>>> TEMP_RIGHT_BRANCH +3:38c + import java.util.Collections; +====1 +1:36a +2:56c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:57a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:69c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows 
+ where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,80c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +==== +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:85,135c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + where = " 1=1 "; + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:141c +3:82c + * @param role role string value. +====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:146c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:155,156c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:162c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:166,168c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +==== +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:173,185c + <<<<<<< HEAD + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); + ||||||| a41d209d5 + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; + ======= + String sql = "SELECT role FROM roles WHERE role like '%' ? 
'%'"; + List users = this.jt.queryForList(sql, new String[] {role}, String.class); + return users; + >>>>>>> TEMP_RIGHT_BRANCH +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:187a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..44c1365407 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,3017 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import 
com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. + */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + 
configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? 
AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? 
AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? 
AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? 
AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) 
"; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? "); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? 
and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? 
"); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? "); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? 
and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? 
and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? )" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? 
)" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? )" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? 
) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? 
"; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; +2:1406,1434c + <<<<<<< HEAD + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + ||||||| a41d209d5 + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + ======= + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; +3:1406,1413c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) "; +====1 +1:2292c + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); +2:1437c +3:1416c + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); +====1 +1:2295c + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); +2:1440c +3:1419c + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); +====2 +1:2297a +3:1421a +2:1443c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1457c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1462,1467c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1472,1473c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? 
AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1478,1479c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1484,1485c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1539c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1556,1560c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1565c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1614,1617c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1619,1623c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1626c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? "); + paramList.add(appName); +2:1629c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1632,1640c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1731,1732c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_HistoryController.java.txt new file mode 100644 index 0000000000..b506cbd739 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_HistoryController.java.txt @@ -0,0 +1,120 @@ +====1 +1:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +2:30a +3:30a +====2 +1:42c +3:39c + +2:39c + +====2 +1:45c +3:42c + +2:42c + +====1 +1:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +2:46,50c +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. 
+====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:70c + +====1 +1:75c + * Query the detailed configuration history informations. +2:72,75c +3:72,75c + * Query the detailed configuration history information. + * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,86c + <<<<<<< HEAD + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ||||||| a41d209d5 + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ======= + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { + >>>>>>> TEMP_RIGHT_BRANCH +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +====2 +1:81a +3:80a +2:89,93c + <<<<<<< HEAD + + ||||||| a41d209d5 + + ======= +==== +1:82a +2:95,107c + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:82,93c + /** + * Query previous config history information. 
+ * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..1d2e6e10d3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_MergeDatumService.java.txt @@ -0,0 +1,43 @@ +====1 +1:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:30,32c +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +2:110c +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====1 +1:117c + if (ApplicationUtils.getStandaloneMode()) { +2:118c +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); +2:167,177c + <<<<<<< HEAD + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); + ||||||| a41d209d5 + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); + ======= + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +3:167c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +====2 +1:168a +3:169a +2:180c + >>>>>>> TEMP_RIGHT_BRANCH diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..5df60ac501 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,47 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:31c + import com.alibaba.nacos.core.utils.InetUtils; +2:39c +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:55c + public boolean process(AbstractDelayTask task) { +2:63c +3:56c + public boolean process(NacosTask task) { +====1 +1:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:95c +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====1 +1:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:109c +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..9dcd6442e3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,67 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:49c + public boolean process(AbstractDelayTask task) { +2:57c +3:50c + public boolean process(NacosTask task) { +====1 +1:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:84c +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +2:86c +3:79c 
+ .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====1 +1:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:90c +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:100c +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:108c +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_application.properties.txt new file mode 100644 index 0000000000..d78b420770 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_application.properties.txt @@ -0,0 +1,89 @@ +====1 +1:40,41c + # db.user=nacos + # db.password=nacos +2:40,41c +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====1 +1:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +2:112c +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,214c + + <<<<<<< HEAD + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + 
spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + + ||||||| a41d209d5 + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..99b5611a8e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,169 @@ +====1 +1:25c + 1.4.0-SNAPSHOT +2:25c +3:25c + 1.4.1-SNAPSHOT +====1 +1:39c + nacos-all-1.4.0-SNAPSHOT +2:39c +3:39c + nacos-all-1.4.1-SNAPSHOT +====1 +1:129c + 2.1.16.RELEASE +2:129c +3:129c + 2.1.17.RELEASE +====1 +1:131c + 2.6 +2:130a +3:130a +====1 +1:133c + 2.2 +2:132c +3:132c + 2.6 +====1 +1:144c + 1.7.17 +2:142a +3:142a +====1 +1:170a +2:169,175c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:285,287c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:315,321c + <<<<<<< HEAD + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** + ||||||| a41d209d5 + **/istio/model/**,**/nacos/test/** + ======= + **/istio/model/**,**/consistency/entity/**,**/nacos/test/** + >>>>>>> TEMP_RIGHT_BRANCH +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====1 +1:341a +2:356c +3:350c + /console-ui/** +====1 +1:553a +2:569,587c 
+3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====1 +1:581a +2:616c +3:610c + sys +====1 +1:688a +2:724,728c +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====1 +1:712,717c + + commons-lang + commons-lang + ${commons-lang.version} + + +2:751a +3:745a +====1 +1:817,822c + + com.ning + async-http-client + ${async-http-client.version} + + +2:850a +3:844a +====1 +1:1027a +2:1056,1093c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..f5fe26eb1d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_AsyncNotifyService.java.txt @@ -0,0 +1,83 @@ +====1 +1:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====1 +1:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +2:130c +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +====2 +1:135a +3:135a +2:136,143c + <<<<<<< HEAD + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ||||||| a41d209d5 + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ======= +==== +1:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:145,146c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); + >>>>>>> TEMP_RIGHT_BRANCH +3:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:166c +3:157c + +====1 +1:171c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +2:180c +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====1 +1:177c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +2:186c +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====1 +1:199c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +2:208c +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====1 +1:262c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +2:271c +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====1 +1:265c + .format(URL_PATTERN_TENANT, target, 
ApplicationUtils.getContextPath(), dataId, group, tenant); +2:274c +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:321c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ConfigController.java.txt new file mode 100644 index 0000000000..aff019c609 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ConfigController.java.txt @@ -0,0 +1,200 @@ +====1 +1:18a +2:19c +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====1 +1:22a +2:24c +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +==== +1:34a +2:37,38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; + <<<<<<< HEAD +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====2 +1:35a +3:38a +2:40,43c + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.result.ResultBuilder; + ======= + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:45a +3:40a +====1 +1:45a +2:53c +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====1 +1:48c + import com.alibaba.nacos.core.utils.InetUtils; +2:56c +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +2:100a +3:95a +====1 +1:137a +2:144,147c +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====1 +1:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:188c +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:199c + tenant = processTenant(tenant); +2:209c +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:283c + return ResultBuilder.buildSuccessResult(true); +2:293c +3:288c + return RestResultUtils.success(true); +====1 +1:472c + tenant = processTenant(tenant); +2:482c +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:537c +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +==== +1:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +2:540,543c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:535a +2:545c +3:540c + +====1 +1:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +2:558c +3:553c + return 
RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====1 +1:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +2:570c +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====1 +1:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +2:594c +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====1 +1:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:598c +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:611c +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +2:614c +3:609c + return RestResultUtils.success("导入成功", saveResult); +====1 +1:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +2:638c +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +==== +1:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +2:641,643c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====1 +1:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +2:645c +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:659c +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:683c +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:696c +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +2:699c +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..edbfb2552d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_DiskUtils.java.txt @@ -0,0 +1,24 @@ +356,364d355 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, +< checksum); final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ||||||| 
a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); +< final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ======= +368d358 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java +407,415d396 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, +< checksum); final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, checksum); +< final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ======= +419d399 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..12f8b7755c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,194 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +==== +1:40a +2:20,70c + <<<<<<< HEAD + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + ||||||| a41d209d5 + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.ConfigExecutor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + ======= + import com.alibaba.nacos.common.utils.IPUtil; +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====1 +1:45c + import com.alibaba.nacos.core.utils.ApplicationUtils; +2:75c +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:77,92c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + >>>>>>> TEMP_RIGHT_BRANCH + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====1 +1:55,58c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +2:100a +3:49a +====1 +1:86,87c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +2:127a +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:160,162c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new 
SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====1 +1:130c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +2:170c +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====1 +1:193c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:233c +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:196c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:236c +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:204,217c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? defaultValue : value; + } + +2:243a +3:192a +====1 +1:272c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:298c +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====1 +1:275c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:301c +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..3c95b4eb48 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,215 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:30a +3:33a +2:42,49c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + + ======= +====1 +1:32a +2:52,53c +3:36,37c + import java.util.Collections; + import 
java.util.List; +====2 +1:35a +3:40a +2:57c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:68c +3:51c + private PermissionsRepository permissionsRepository; +==== +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:71,140c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(role)) { + params = Collections.singletonList(role); + } else { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:151,152c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:163,167c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..b2b75323b7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,283 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import 
com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:31a +3:33a +2:42,48c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + ======= +==== +1:35a +2:53,54c + import java.util.Collections; + >>>>>>> TEMP_RIGHT_BRANCH +3:38c + import java.util.Collections; +====1 +1:36a +2:56c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:57a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:69c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,80c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +==== +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:85,135c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = 
"select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + where = " 1=1 "; + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:141c +3:82c + * @param role role string value. +====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:146c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:155,156c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:162c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:166,168c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +==== +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:173,185c + <<<<<<< HEAD + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); + ||||||| a41d209d5 + String sql = "SELECT role FROM roles WHERE role like '%' ? 
'%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; + ======= + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[] {role}, String.class); + return users; + >>>>>>> TEMP_RIGHT_BRANCH +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:187a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..44c1365407 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,3017 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import 
com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. + */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + 
configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? 
AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? 
AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? 
AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? 
AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) 
"; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? "); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? 
and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? 
"); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? "); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? 
and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? 
and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? )" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? 
)" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? )" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? 
) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? 
"; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; +2:1406,1434c + <<<<<<< HEAD + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + ||||||| a41d209d5 + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + ======= + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; +3:1406,1413c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) "; +====1 +1:2292c + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); +2:1437c +3:1416c + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); +====1 +1:2295c + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); +2:1440c +3:1419c + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); +====2 +1:2297a +3:1421a +2:1443c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1457c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1462,1467c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1472,1473c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? 
AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1478,1479c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1484,1485c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1539c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1556,1560c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1565c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1614,1617c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1619,1623c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1626c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? "); + paramList.add(appName); +2:1629c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1632,1640c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1731,1732c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..0fe966b505 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_MergeDatumService.java.txt @@ -0,0 +1,43 @@ +====1 +1:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:30,32c +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +2:110c +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====1 +1:117c + if (ApplicationUtils.getStandaloneMode()) { +2:118c +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); +2:167,175c + <<<<<<< HEAD + persistService.removeConfigInfo(dataId, 
group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + ||||||| a41d209d5 + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + ======= + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +3:167c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +====2 +1:168c +3:169c + + group); +2:177,178c + >>>>>>> TEMP_RIGHT_BRANCH + + group); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..5df60ac501 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,47 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:31c + import com.alibaba.nacos.core.utils.InetUtils; +2:39c +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:55c + public boolean process(AbstractDelayTask task) { +2:63c +3:56c + public boolean process(NacosTask task) { +====1 +1:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:95c +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====1 +1:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:109c +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..9dcd6442e3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,67 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:49c + public boolean 
process(AbstractDelayTask task) { +2:57c +3:50c + public boolean process(NacosTask task) { +====1 +1:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:84c +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +2:86c +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====1 +1:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:90c +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:100c +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:108c +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_application.properties.txt new file mode 100644 index 0000000000..d78b420770 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_application.properties.txt @@ -0,0 +1,89 @@ +====1 +1:40,41c + # db.user=nacos + # db.password=nacos +2:40,41c +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====1 +1:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +2:112c +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,214c + + <<<<<<< HEAD + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + 
#nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + + ||||||| a41d209d5 + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..99b5611a8e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,169 @@ +====1 +1:25c + 1.4.0-SNAPSHOT +2:25c +3:25c + 1.4.1-SNAPSHOT +====1 +1:39c + nacos-all-1.4.0-SNAPSHOT +2:39c +3:39c + nacos-all-1.4.1-SNAPSHOT +====1 +1:129c + 2.1.16.RELEASE +2:129c +3:129c + 2.1.17.RELEASE +====1 +1:131c + 2.6 +2:130a +3:130a +====1 +1:133c + 2.2 +2:132c +3:132c + 2.6 +====1 +1:144c + 1.7.17 +2:142a +3:142a +====1 +1:170a +2:169,175c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:285,287c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:315,321c + <<<<<<< HEAD + 
**/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** + ||||||| a41d209d5 + **/istio/model/**,**/nacos/test/** + ======= + **/istio/model/**,**/consistency/entity/**,**/nacos/test/** + >>>>>>> TEMP_RIGHT_BRANCH +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====1 +1:341a +2:356c +3:350c + /console-ui/** +====1 +1:553a +2:569,587c +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====1 +1:581a +2:616c +3:610c + sys +====1 +1:688a +2:724,728c +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====1 +1:712,717c + + commons-lang + commons-lang + ${commons-lang.version} + + +2:751a +3:745a +====1 +1:817,822c + + com.ning + async-http-client + ${async-http-client.version} + + +2:850a +3:844a +====1 +1:1027a +2:1056,1093c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..f5fe26eb1d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_AsyncNotifyService.java.txt @@ -0,0 +1,83 @@ +====1 +1:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====1 +1:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +2:130c +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +====2 +1:135a +3:135a +2:136,143c + <<<<<<< HEAD + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ||||||| a41d209d5 + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ======= +==== +1:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:145,146c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); + >>>>>>> TEMP_RIGHT_BRANCH +3:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:166c +3:157c + +====1 +1:171c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +2:180c +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====1 +1:177c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +2:186c +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====1 +1:199c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, 
task.target); +2:208c +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====1 +1:262c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +2:271c +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====1 +1:265c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +2:274c +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:321c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ConfigController.java.txt new file mode 100644 index 0000000000..c1619171c1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ConfigController.java.txt @@ -0,0 +1,193 @@ +====1 +1:18a +2:19c +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====1 +1:22a +2:24c +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +==== +1:34a +2:37,38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; + <<<<<<< HEAD +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====2 +1:35a +3:38a +2:40,43c + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.result.ResultBuilder; + ======= + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:45a +3:40a +====1 +1:45a +2:53c +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====1 +1:48c + import com.alibaba.nacos.core.utils.InetUtils; +2:56c +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +2:100a +3:95a +====1 +1:137a +2:144,147c +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====1 +1:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:188c +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:199c + tenant = processTenant(tenant); +2:209c +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:283c + return ResultBuilder.buildSuccessResult(true); +2:293c +3:288c + return RestResultUtils.success(true); +====1 +1:472c + tenant = processTenant(tenant); +2:482c +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:537c +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +2:540,543c +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:535a +2:545c +3:540c + +====1 +1:548c + return 
ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +2:558c +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====1 +1:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +2:570c +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====1 +1:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +2:594c +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====1 +1:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:598c +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:611c +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +2:614c +3:609c + return RestResultUtils.success("导入成功", saveResult); +====1 +1:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +2:638c +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====1 +1:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +2:641,643c +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====1 +1:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +2:645c +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:659c +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:683c +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:696c +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +2:699c +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..edbfb2552d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_DiskUtils.java.txt @@ -0,0 +1,24 @@ +356,364d355 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, +< checksum); final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( 
+< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); +< final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ======= +368d358 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java +407,415d396 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, +< checksum); final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, checksum); +< final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ======= +419d399 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..465add68ba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,186 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====1 +1:30a +2:31c +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:44c + +====2 +1:46c +3:47c + +2:47c + +====2 +1:49c +3:50c + +2:50c + +====2 +1:51c +3:52c + +2:52c + +====2 +1:53c +3:54c + +2:54c + +====2 +1:56c +3:57c + +2:57c + +====2 +1:58c +3:59c + +2:59c + +====2 +1:68c +3:69c + +2:69c + +====2 +1:70c +3:71c + +2:71c + +====2 +1:72c +3:73c + +2:73c + +====2 +1:74c +3:75c + +2:75c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:78,95c + <<<<<<< HEAD + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ||||||| a41d209d5 + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ======= + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + >>>>>>> TEMP_RIGHT_BRANCH +3:78,83c + + String where = " username= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +====2 +1:82a +3:85a +2:98,104c + <<<<<<< HEAD + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ||||||| a41d209d5 + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ======= +==== +1:84c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:106,107c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + >>>>>>> TEMP_RIGHT_BRANCH +3:87c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:109c + +====2 +1:88c +3:91c + +2:111c + +====2 +1:96c +3:99c + +2:119c + +====2 +1:98c +3:101c + +2:121c + +====2 +1:106c +3:109c + +2:129c + +====2 +1:121c +3:124c + +2:144c + +====2 +1:137c +3:140c + +2:160c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:164c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:167c + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..12f8b7755c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,194 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +==== +1:40a +2:20,70c + <<<<<<< HEAD + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + ||||||| a41d209d5 + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.ConfigExecutor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + ======= + import com.alibaba.nacos.common.utils.IPUtil; +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====1 +1:45c + import com.alibaba.nacos.core.utils.ApplicationUtils; +2:75c +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:77,92c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + >>>>>>> TEMP_RIGHT_BRANCH + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====1 +1:55,58c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +2:100a +3:49a +====1 +1:86,87c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +2:127a +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 
10, 10, TimeUnit.SECONDS); +2:160,162c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====1 +1:130c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +2:170c +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====1 +1:193c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:233c +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:196c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:236c +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:204,217c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? defaultValue : value; + } + +2:243a +3:192a +====1 +1:272c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:298c +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====1 +1:275c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:301c +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..3c95b4eb48 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,215 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:30a +3:33a +2:42,49c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + + ======= 
+====1 +1:32a +2:52,53c +3:36,37c + import java.util.Collections; + import java.util.List; +====2 +1:35a +3:40a +2:57c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:68c +3:51c + private PermissionsRepository permissionsRepository; +==== +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:71,140c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(role)) { + params = Collections.singletonList(role); + } else { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:151,152c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:163,167c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..b2b75323b7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,283 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import 
com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:31a +3:33a +2:42,48c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + ======= +==== +1:35a +2:53,54c + import java.util.Collections; + >>>>>>> TEMP_RIGHT_BRANCH +3:38c + import java.util.Collections; +====1 +1:36a +2:56c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:57a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:69c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,80c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +==== +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:85,135c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = 
"select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + where = " 1=1 "; + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:141c +3:82c + * @param role role string value. +====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:146c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:155,156c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:162c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:166,168c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +==== +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:173,185c + <<<<<<< HEAD + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); + ||||||| a41d209d5 + String sql = "SELECT role FROM roles WHERE role like '%' ? 
'%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; + ======= + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[] {role}, String.class); + return users; + >>>>>>> TEMP_RIGHT_BRANCH +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:187a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..44c1365407 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,3017 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import 
com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. + */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + 
configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? 
AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? 
AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? 
AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? 
AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) 
"; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? "); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? 
and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? 
"); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? "); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? 
and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? 
and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? )" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? 
)" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? )" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? 
) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? 
"; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; +2:1406,1434c + <<<<<<< HEAD + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + ||||||| a41d209d5 + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + ======= + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; +3:1406,1413c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) "; +====1 +1:2292c + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); +2:1437c +3:1416c + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); +====1 +1:2295c + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); +2:1440c +3:1419c + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); +====2 +1:2297a +3:1421a +2:1443c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1457c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1462,1467c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1472,1473c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? 
AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1478,1479c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1484,1485c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1539c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1556,1560c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1565c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1614,1617c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1619,1623c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1626c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? "); + paramList.add(appName); +2:1629c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1632,1640c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1731,1732c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_HistoryController.java.txt new file mode 100644 index 0000000000..b506cbd739 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_HistoryController.java.txt @@ -0,0 +1,120 @@ +====1 +1:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +2:30a +3:30a +====2 +1:42c +3:39c + +2:39c + +====2 +1:45c +3:42c + +2:42c + +====1 +1:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +2:46,50c +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. 
+====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:70c + +====1 +1:75c + * Query the detailed configuration history informations. +2:72,75c +3:72,75c + * Query the detailed configuration history information. + * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,86c + <<<<<<< HEAD + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ||||||| a41d209d5 + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ======= + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { + >>>>>>> TEMP_RIGHT_BRANCH +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +====2 +1:81a +3:80a +2:89,93c + <<<<<<< HEAD + + ||||||| a41d209d5 + + ======= +==== +1:82a +2:95,107c + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:82,93c + /** + * Query previous config history information. 
+ * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..1d2e6e10d3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_MergeDatumService.java.txt @@ -0,0 +1,43 @@ +====1 +1:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:30,32c +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +2:110c +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====1 +1:117c + if (ApplicationUtils.getStandaloneMode()) { +2:118c +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); +2:167,177c + <<<<<<< HEAD + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); + ||||||| a41d209d5 + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); + ======= + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +3:167c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +====2 +1:168a +3:169a +2:180c + >>>>>>> TEMP_RIGHT_BRANCH diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..5df60ac501 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,47 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:31c + import com.alibaba.nacos.core.utils.InetUtils; +2:39c +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:55c + public boolean process(AbstractDelayTask task) { +2:63c +3:56c + public boolean process(NacosTask task) { +====1 +1:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:95c +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====1 +1:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:109c +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..9dcd6442e3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,67 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:49c + public boolean process(AbstractDelayTask task) { +2:57c +3:50c + public boolean process(NacosTask task) { +====1 +1:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:84c +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +2:86c +3:79c + 
.format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====1 +1:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:90c +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:100c +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:108c +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_application.properties.txt new file mode 100644 index 0000000000..d78b420770 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_application.properties.txt @@ -0,0 +1,89 @@ +====1 +1:40,41c + # db.user=nacos + # db.password=nacos +2:40,41c +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====1 +1:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +2:112c +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,214c + + <<<<<<< HEAD + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + 
spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + + ||||||| a41d209d5 + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..99b5611a8e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,169 @@ +====1 +1:25c + 1.4.0-SNAPSHOT +2:25c +3:25c + 1.4.1-SNAPSHOT +====1 +1:39c + nacos-all-1.4.0-SNAPSHOT +2:39c +3:39c + nacos-all-1.4.1-SNAPSHOT +====1 +1:129c + 2.1.16.RELEASE +2:129c +3:129c + 2.1.17.RELEASE +====1 +1:131c + 2.6 +2:130a +3:130a +====1 +1:133c + 2.2 +2:132c +3:132c + 2.6 +====1 +1:144c + 1.7.17 +2:142a +3:142a +====1 +1:170a +2:169,175c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:285,287c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:315,321c + <<<<<<< HEAD + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** + ||||||| a41d209d5 + **/istio/model/**,**/nacos/test/** + ======= + **/istio/model/**,**/consistency/entity/**,**/nacos/test/** + >>>>>>> TEMP_RIGHT_BRANCH +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====1 +1:341a +2:356c +3:350c + /console-ui/** +====1 +1:553a +2:569,587c 
+3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====1 +1:581a +2:616c +3:610c + sys +====1 +1:688a +2:724,728c +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====1 +1:712,717c + + commons-lang + commons-lang + ${commons-lang.version} + + +2:751a +3:745a +====1 +1:817,822c + + com.ning + async-http-client + ${async-http-client.version} + + +2:850a +3:844a +====1 +1:1027a +2:1056,1093c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..f5fe26eb1d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_AsyncNotifyService.java.txt @@ -0,0 +1,83 @@ +====1 +1:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====1 +1:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +2:130c +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +====2 +1:135a +3:135a +2:136,143c + <<<<<<< HEAD + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ||||||| a41d209d5 + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ======= +==== +1:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:145,146c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); + >>>>>>> TEMP_RIGHT_BRANCH +3:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:166c +3:157c + +====1 +1:171c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +2:180c +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====1 +1:177c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +2:186c +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====1 +1:199c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +2:208c +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====1 +1:262c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +2:271c +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====1 +1:265c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), 
dataId, group, tenant); +2:274c +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:321c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ConfigController.java.txt new file mode 100644 index 0000000000..c1619171c1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ConfigController.java.txt @@ -0,0 +1,193 @@ +====1 +1:18a +2:19c +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====1 +1:22a +2:24c +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +==== +1:34a +2:37,38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; + <<<<<<< HEAD +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====2 +1:35a +3:38a +2:40,43c + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.result.ResultBuilder; + ======= + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:45a +3:40a +====1 +1:45a +2:53c +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====1 +1:48c + import com.alibaba.nacos.core.utils.InetUtils; +2:56c +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +2:100a +3:95a +====1 +1:137a +2:144,147c +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====1 +1:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:188c +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:199c + tenant = processTenant(tenant); +2:209c +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:283c + return ResultBuilder.buildSuccessResult(true); +2:293c +3:288c + return RestResultUtils.success(true); +====1 +1:472c + tenant = processTenant(tenant); +2:482c +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:537c +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +2:540,543c +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:535a +2:545c +3:540c + +====1 +1:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +2:558c +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====1 +1:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +2:570c +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====1 +1:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +2:594c 
+3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====1 +1:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:598c +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:611c +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +2:614c +3:609c + return RestResultUtils.success("导入成功", saveResult); +====1 +1:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +2:638c +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====1 +1:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +2:641,643c +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====1 +1:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +2:645c +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:659c +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:683c +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:696c +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +2:699c +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..edbfb2552d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_DiskUtils.java.txt @@ -0,0 +1,24 @@ +356,364d355 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, +< checksum); final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); +< final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ======= +368d358 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java +407,415d396 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new 
CheckedInputStream(fis, +< checksum); final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, checksum); +< final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ======= +419d399 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..465add68ba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,186 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====1 +1:30a +2:31c +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:44c + +====2 +1:46c +3:47c + +2:47c + +====2 +1:49c +3:50c + +2:50c + +====2 +1:51c +3:52c + +2:52c + +====2 +1:53c +3:54c + +2:54c + +====2 +1:56c +3:57c + +2:57c + +====2 +1:58c +3:59c + +2:59c + +====2 +1:68c +3:69c + +2:69c + +====2 +1:70c +3:71c + +2:71c + +====2 +1:72c +3:73c + +2:73c + +====2 +1:74c +3:75c + +2:75c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:78,95c + <<<<<<< HEAD + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ||||||| a41d209d5 + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ======= + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + >>>>>>> TEMP_RIGHT_BRANCH +3:78,83c + + String where = " username= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +====2 +1:82a +3:85a +2:98,104c + <<<<<<< HEAD + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ||||||| a41d209d5 + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ======= +==== +1:84c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:106,107c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + >>>>>>> TEMP_RIGHT_BRANCH +3:87c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:109c + +====2 +1:88c +3:91c + +2:111c + +====2 +1:96c +3:99c + +2:119c + +====2 +1:98c +3:101c + +2:121c + +====2 +1:106c +3:109c + +2:129c + +====2 +1:121c +3:124c + +2:144c + +====2 +1:137c +3:140c + +2:160c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:164c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:167c + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..12f8b7755c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,194 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +==== +1:40a +2:20,70c + <<<<<<< HEAD + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + ||||||| a41d209d5 + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.ConfigExecutor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + ======= + import com.alibaba.nacos.common.utils.IPUtil; +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====1 +1:45c + import com.alibaba.nacos.core.utils.ApplicationUtils; +2:75c +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:77,92c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + >>>>>>> TEMP_RIGHT_BRANCH + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====1 +1:55,58c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +2:100a +3:49a +====1 +1:86,87c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +2:127a +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 
10, 10, TimeUnit.SECONDS); +2:160,162c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====1 +1:130c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +2:170c +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====1 +1:193c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:233c +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:196c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:236c +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:204,217c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? defaultValue : value; + } + +2:243a +3:192a +====1 +1:272c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:298c +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====1 +1:275c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:301c +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..3c95b4eb48 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,215 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:30a +3:33a +2:42,49c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + + ======= +====1 
+1:32a +2:52,53c +3:36,37c + import java.util.Collections; + import java.util.List; +====2 +1:35a +3:40a +2:57c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:68c +3:51c + private PermissionsRepository permissionsRepository; +==== +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:71,140c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(role)) { + params = Collections.singletonList(role); + } else { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:151,152c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:163,167c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..b2b75323b7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,283 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import 
com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:31a +3:33a +2:42,48c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + ======= +==== +1:35a +2:53,54c + import java.util.Collections; + >>>>>>> TEMP_RIGHT_BRANCH +3:38c + import java.util.Collections; +====1 +1:36a +2:56c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:57a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:69c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,80c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +==== +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:85,135c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = 
"select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + where = " 1=1 "; + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:141c +3:82c + * @param role role string value. +====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:146c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:155,156c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:162c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:166,168c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +==== +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:173,185c + <<<<<<< HEAD + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); + ||||||| a41d209d5 + String sql = "SELECT role FROM roles WHERE role like '%' ? 
'%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; + ======= + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[] {role}, String.class); + return users; + >>>>>>> TEMP_RIGHT_BRANCH +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:187a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..44c1365407 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,3017 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import 
com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. + */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + 
configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? 
AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? 
AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? 
AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? 
AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) 
"; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? "); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? 
and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? 
"); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? "); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? 
and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? 
and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? )" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? 
)" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? )" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? 
) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? 
"; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; +2:1406,1434c + <<<<<<< HEAD + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + ||||||| a41d209d5 + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + ======= + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; +3:1406,1413c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) "; +====1 +1:2292c + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); +2:1437c +3:1416c + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); +====1 +1:2295c + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); +2:1440c +3:1419c + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); +====2 +1:2297a +3:1421a +2:1443c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1457c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1462,1467c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1472,1473c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? 
AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1478,1479c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1484,1485c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1539c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1556,1560c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1565c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1614,1617c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1619,1623c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1626c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? "); + paramList.add(appName); +2:1629c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1632,1640c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1731,1732c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_HistoryController.java.txt new file mode 100644 index 0000000000..b506cbd739 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_HistoryController.java.txt @@ -0,0 +1,120 @@ +====1 +1:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +2:30a +3:30a +====2 +1:42c +3:39c + +2:39c + +====2 +1:45c +3:42c + +2:42c + +====1 +1:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +2:46,50c +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. 
+====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:70c + +====1 +1:75c + * Query the detailed configuration history informations. +2:72,75c +3:72,75c + * Query the detailed configuration history information. + * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,86c + <<<<<<< HEAD + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ||||||| a41d209d5 + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ======= + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { + >>>>>>> TEMP_RIGHT_BRANCH +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +====2 +1:81a +3:80a +2:89,93c + <<<<<<< HEAD + + ||||||| a41d209d5 + + ======= +==== +1:82a +2:95,107c + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:82,93c + /** + * Query previous config history information. 
+ * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..1d2e6e10d3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_MergeDatumService.java.txt @@ -0,0 +1,43 @@ +====1 +1:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:30,32c +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +2:110c +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====1 +1:117c + if (ApplicationUtils.getStandaloneMode()) { +2:118c +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); +2:167,177c + <<<<<<< HEAD + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); + ||||||| a41d209d5 + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); + ======= + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +3:167c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +====2 +1:168a +3:169a +2:180c + >>>>>>> TEMP_RIGHT_BRANCH diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..5df60ac501 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,47 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:31c + import com.alibaba.nacos.core.utils.InetUtils; +2:39c +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:55c + public boolean process(AbstractDelayTask task) { +2:63c +3:56c + public boolean process(NacosTask task) { +====1 +1:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:95c +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====1 +1:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:109c +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..9dcd6442e3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,67 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:49c + public boolean process(AbstractDelayTask task) { +2:57c +3:50c + public boolean process(NacosTask task) { +====1 +1:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:84c +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +2:86c +3:79c + .format(URL_PATTERN, 
serverIp, EnvUtil.getContextPath(), dataId, group); +====1 +1:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:90c +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:100c +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:108c +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_application.properties.txt new file mode 100644 index 0000000000..d78b420770 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_application.properties.txt @@ -0,0 +1,89 @@ +====1 +1:40,41c + # db.user=nacos + # db.password=nacos +2:40,41c +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====1 +1:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +2:112c +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,214c + + <<<<<<< HEAD + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + 
spring.jpa.show-sql=true + + + ||||||| a41d209d5 + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..99b5611a8e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,169 @@ +====1 +1:25c + 1.4.0-SNAPSHOT +2:25c +3:25c + 1.4.1-SNAPSHOT +====1 +1:39c + nacos-all-1.4.0-SNAPSHOT +2:39c +3:39c + nacos-all-1.4.1-SNAPSHOT +====1 +1:129c + 2.1.16.RELEASE +2:129c +3:129c + 2.1.17.RELEASE +====1 +1:131c + 2.6 +2:130a +3:130a +====1 +1:133c + 2.2 +2:132c +3:132c + 2.6 +====1 +1:144c + 1.7.17 +2:142a +3:142a +====1 +1:170a +2:169,175c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:285,287c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:315,321c + <<<<<<< HEAD + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** + ||||||| a41d209d5 + **/istio/model/**,**/nacos/test/** + ======= + **/istio/model/**,**/consistency/entity/**,**/nacos/test/** + >>>>>>> TEMP_RIGHT_BRANCH +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====1 +1:341a +2:356c +3:350c + /console-ui/** +====1 +1:553a +2:569,587c +3:563,581c + + remove-test-data + + + + 
org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====1 +1:581a +2:616c +3:610c + sys +====1 +1:688a +2:724,728c +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====1 +1:712,717c + + commons-lang + commons-lang + ${commons-lang.version} + + +2:751a +3:745a +====1 +1:817,822c + + com.ning + async-http-client + ${async-http-client.version} + + +2:850a +3:844a +====1 +1:1027a +2:1056,1093c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..f5fe26eb1d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_AsyncNotifyService.java.txt @@ -0,0 +1,83 @@ +====1 +1:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====1 +1:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +2:130c +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +====2 +1:135a +3:135a +2:136,143c + <<<<<<< HEAD + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ||||||| a41d209d5 + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ======= +==== +1:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:145,146c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); + >>>>>>> TEMP_RIGHT_BRANCH +3:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:166c +3:157c + +====1 +1:171c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +2:180c +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====1 +1:177c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +2:186c +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====1 +1:199c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +2:208c +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====1 +1:262c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +2:271c +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====1 +1:265c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +2:274c 
+3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:321c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ConfigController.java.txt new file mode 100644 index 0000000000..c1619171c1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ConfigController.java.txt @@ -0,0 +1,193 @@ +====1 +1:18a +2:19c +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====1 +1:22a +2:24c +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +==== +1:34a +2:37,38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; + <<<<<<< HEAD +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====2 +1:35a +3:38a +2:40,43c + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.result.ResultBuilder; + ======= + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:45a +3:40a +====1 +1:45a +2:53c +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====1 +1:48c + import com.alibaba.nacos.core.utils.InetUtils; +2:56c +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +2:100a +3:95a +====1 +1:137a +2:144,147c +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====1 +1:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:188c +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:199c + tenant = processTenant(tenant); +2:209c +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:283c + return ResultBuilder.buildSuccessResult(true); +2:293c +3:288c + return RestResultUtils.success(true); +====1 +1:472c + tenant = processTenant(tenant); +2:482c +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:537c +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +2:540,543c +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:535a +2:545c +3:540c + +====1 +1:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +2:558c +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====1 +1:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +2:570c +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====1 +1:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +2:594c +3:589c + return 
RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====1 +1:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:598c +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:611c +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +2:614c +3:609c + return RestResultUtils.success("导入成功", saveResult); +====1 +1:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +2:638c +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====1 +1:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +2:641,643c +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====1 +1:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +2:645c +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:659c +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:683c +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:696c +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +2:699c +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..edbfb2552d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_DiskUtils.java.txt @@ -0,0 +1,24 @@ +356,364d355 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, +< checksum); final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); +< final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ======= +368d358 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java +407,415d396 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new 
CheckedInputStream(fis, +< checksum); final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, checksum); +< final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ======= +419d399 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..465add68ba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,186 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====1 +1:30a +2:31c +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:44c + +====2 +1:46c +3:47c + +2:47c + +====2 +1:49c +3:50c + +2:50c + +====2 +1:51c +3:52c + +2:52c + +====2 +1:53c +3:54c + +2:54c + +====2 +1:56c +3:57c + +2:57c + +====2 +1:58c +3:59c + +2:59c + +====2 +1:68c +3:69c + +2:69c + +====2 +1:70c +3:71c + +2:71c + +====2 +1:72c +3:73c + +2:73c + +====2 +1:74c +3:75c + +2:75c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:78,95c + <<<<<<< HEAD + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ||||||| a41d209d5 + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ======= + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + >>>>>>> TEMP_RIGHT_BRANCH +3:78,83c + + String where = " username= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +====2 +1:82a +3:85a +2:98,104c + <<<<<<< HEAD + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ||||||| a41d209d5 + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ======= +==== +1:84c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:106,107c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + >>>>>>> TEMP_RIGHT_BRANCH +3:87c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:109c + +====2 +1:88c +3:91c + +2:111c + +====2 +1:96c +3:99c + +2:119c + +====2 +1:98c +3:101c + +2:121c + +====2 +1:106c +3:109c + +2:129c + +====2 +1:121c +3:124c + +2:144c + +====2 +1:137c +3:140c + +2:160c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:164c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:167c + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..12f8b7755c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,194 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +==== +1:40a +2:20,70c + <<<<<<< HEAD + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + ||||||| a41d209d5 + import 
static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.ConfigExecutor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + ======= + import com.alibaba.nacos.common.utils.IPUtil; +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====1 +1:45c + import com.alibaba.nacos.core.utils.ApplicationUtils; +2:75c +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:77,92c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + >>>>>>> TEMP_RIGHT_BRANCH + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====1 +1:55,58c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +2:100a +3:49a +====1 +1:86,87c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +2:127a +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new 
CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:160,162c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====1 +1:130c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +2:170c +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====1 +1:193c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:233c +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:196c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:236c +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:204,217c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? defaultValue : value; + } + +2:243a +3:192a +====1 +1:272c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:298c +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====1 +1:275c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:301c +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..3c95b4eb48 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,215 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:30a +3:33a +2:42,49c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + + ======= +====1 +1:32a +2:52,53c +3:36,37c + import java.util.Collections; + import java.util.List; +====2 +1:35a +3:40a +2:57c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:68c +3:51c + private PermissionsRepository permissionsRepository; +==== +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:71,140c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(role)) { + params = Collections.singletonList(role); + } else { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:151,152c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:163,167c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..b2b75323b7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,283 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import 
com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:31a +3:33a +2:42,48c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + ======= +==== +1:35a +2:53,54c + import java.util.Collections; + >>>>>>> TEMP_RIGHT_BRANCH +3:38c + import java.util.Collections; +====1 +1:36a +2:56c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:57a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:69c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,80c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +==== +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:85,135c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = 
"select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + where = " 1=1 "; + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:141c +3:82c + * @param role role string value. +====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:146c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:155,156c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:162c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:166,168c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +==== +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:173,185c + <<<<<<< HEAD + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); + ||||||| a41d209d5 + String sql = "SELECT role FROM roles WHERE role like '%' ? 
'%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; + ======= + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[] {role}, String.class); + return users; + >>>>>>> TEMP_RIGHT_BRANCH +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:187a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..44c1365407 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,3017 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import 
com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. + */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + 
configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? 
AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? 
AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? 
AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? 
AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) 
"; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? "); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? 
and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? 
"); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? "); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? 
and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? 
and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? )" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? 
)" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? )" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? 
) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? 
"; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; +2:1406,1434c + <<<<<<< HEAD + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + ||||||| a41d209d5 + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + ======= + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; +3:1406,1413c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) "; +====1 +1:2292c + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); +2:1437c +3:1416c + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); +====1 +1:2295c + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); +2:1440c +3:1419c + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); +====2 +1:2297a +3:1421a +2:1443c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1457c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1462,1467c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1472,1473c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? 
AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1478,1479c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1484,1485c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1539c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1556,1560c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1565c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1614,1617c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1619,1623c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1626c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? "); + paramList.add(appName); +2:1629c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1632,1640c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1731,1732c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_HistoryController.java.txt new file mode 100644 index 0000000000..b506cbd739 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_HistoryController.java.txt @@ -0,0 +1,120 @@ +====1 +1:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +2:30a +3:30a +====2 +1:42c +3:39c + +2:39c + +====2 +1:45c +3:42c + +2:42c + +====1 +1:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +2:46,50c +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. 
+====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:70c + +====1 +1:75c + * Query the detailed configuration history informations. +2:72,75c +3:72,75c + * Query the detailed configuration history information. + * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,86c + <<<<<<< HEAD + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ||||||| a41d209d5 + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ======= + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { + >>>>>>> TEMP_RIGHT_BRANCH +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +====2 +1:81a +3:80a +2:89,93c + <<<<<<< HEAD + + ||||||| a41d209d5 + + ======= +==== +1:82a +2:95,107c + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:82,93c + /** + * Query previous config history information. 
+ * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..1d2e6e10d3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_MergeDatumService.java.txt @@ -0,0 +1,43 @@ +====1 +1:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:30,32c +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +2:110c +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====1 +1:117c + if (ApplicationUtils.getStandaloneMode()) { +2:118c +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); +2:167,177c + <<<<<<< HEAD + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); + ||||||| a41d209d5 + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); + ======= + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +3:167c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +====2 +1:168a +3:169a +2:180c + >>>>>>> TEMP_RIGHT_BRANCH diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..5df60ac501 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,47 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:31c + import com.alibaba.nacos.core.utils.InetUtils; +2:39c +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:55c + public boolean process(AbstractDelayTask task) { +2:63c +3:56c + public boolean process(NacosTask task) { +====1 +1:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:95c +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====1 +1:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:109c +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..9dcd6442e3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,67 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:49c + public boolean process(AbstractDelayTask task) { +2:57c +3:50c + public boolean process(NacosTask task) { +====1 +1:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:84c +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +2:86c +3:79c + 
.format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====1 +1:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:90c +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:100c +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:108c +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_application.properties.txt new file mode 100644 index 0000000000..d78b420770 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_application.properties.txt @@ -0,0 +1,89 @@ +====1 +1:40,41c + # db.user=nacos + # db.password=nacos +2:40,41c +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====1 +1:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +2:112c +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,214c + + <<<<<<< HEAD + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + 
spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + + ||||||| a41d209d5 + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..99b5611a8e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,169 @@ +====1 +1:25c + 1.4.0-SNAPSHOT +2:25c +3:25c + 1.4.1-SNAPSHOT +====1 +1:39c + nacos-all-1.4.0-SNAPSHOT +2:39c +3:39c + nacos-all-1.4.1-SNAPSHOT +====1 +1:129c + 2.1.16.RELEASE +2:129c +3:129c + 2.1.17.RELEASE +====1 +1:131c + 2.6 +2:130a +3:130a +====1 +1:133c + 2.2 +2:132c +3:132c + 2.6 +====1 +1:144c + 1.7.17 +2:142a +3:142a +====1 +1:170a +2:169,175c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:285,287c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:315,321c + <<<<<<< HEAD + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** + ||||||| a41d209d5 + **/istio/model/**,**/nacos/test/** + ======= + **/istio/model/**,**/consistency/entity/**,**/nacos/test/** + >>>>>>> TEMP_RIGHT_BRANCH +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====1 +1:341a +2:356c +3:350c + /console-ui/** +====1 +1:553a +2:569,587c 
+3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====1 +1:581a +2:616c +3:610c + sys +====1 +1:688a +2:724,728c +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====1 +1:712,717c + + commons-lang + commons-lang + ${commons-lang.version} + + +2:751a +3:745a +====1 +1:817,822c + + com.ning + async-http-client + ${async-http-client.version} + + +2:850a +3:844a +====1 +1:1027a +2:1056,1093c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..f5fe26eb1d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_AsyncNotifyService.java.txt @@ -0,0 +1,83 @@ +====1 +1:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====1 +1:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +2:130c +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +====2 +1:135a +3:135a +2:136,143c + <<<<<<< HEAD + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ||||||| a41d209d5 + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); + ======= +==== +1:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:145,146c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); + >>>>>>> TEMP_RIGHT_BRANCH +3:137c + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:166c +3:157c + +====1 +1:171c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +2:180c +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====1 +1:177c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +2:186c +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====1 +1:199c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +2:208c +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====1 +1:262c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +2:271c +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====1 +1:265c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +2:274c +3:265c 
+ .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:321c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ConfigController.java.txt new file mode 100644 index 0000000000..eeb4b37ada --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ConfigController.java.txt @@ -0,0 +1,194 @@ +====1 +1:18a +2:19c +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====1 +1:22a +2:24c +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +==== +1:34a +2:37,38c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====2 +1:35a +3:38a +2:40,44c + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.result.ResultBuilder; + ======= + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:46a +3:40a +====1 +1:45a +2:54c +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====1 +1:48c + import com.alibaba.nacos.core.utils.InetUtils; +2:57c +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +2:101a +3:95a +====1 +1:137a +2:145,148c +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====1 +1:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:189c +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:199c + tenant = processTenant(tenant); +2:210c +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:283c + return ResultBuilder.buildSuccessResult(true); +2:294c +3:288c + return RestResultUtils.success(true); +====1 +1:472c + tenant = processTenant(tenant); +2:483c +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====1 +1:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:538c +3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +2:541,544c +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:535a +2:546c +3:540c + +====1 +1:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +2:559c +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====1 +1:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +2:571c +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====1 +1:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +2:595c 
+3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====1 +1:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:599c +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:612c +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +2:615c +3:609c + return RestResultUtils.success("导入成功", saveResult); +====1 +1:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +2:639c +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====1 +1:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +2:642,644c +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====1 +1:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +2:646c +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====1 +1:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:660c +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +2:684c +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====1 +1:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +2:697c +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====1 +1:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +2:700c +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_DiskUtils.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..edbfb2552d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_DiskUtils.java.txt @@ -0,0 +1,24 @@ +356,364d355 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, +< checksum); final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileOutputStream fos = new FileOutputStream( +< outputFile); final CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); +< final ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(cos))) { +< ======= +368d358 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java +407,415d396 +< <<<<<<< HEAD:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, +< checksum); final 
ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ||||||| a41d209d5:core/src/main/java/com/alibaba/nacos/core/utils/DiskUtils.java +< try (final FileInputStream fis = new FileInputStream( +< sourceFile); final CheckedInputStream cis = new CheckedInputStream(fis, checksum); +< final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(cis))) { +< ======= +419d399 +< >>>>>>> TEMP_RIGHT_BRANCH:sys/src/main/java/com/alibaba/nacos/sys/utils/DiskUtils.java diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..465add68ba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,186 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====1 +1:30a +2:31c +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:44c + +====2 +1:46c +3:47c + +2:47c + +====2 +1:49c +3:50c + +2:50c + +====2 +1:51c +3:52c + +2:52c + +====2 +1:53c +3:54c + +2:54c + +====2 +1:56c +3:57c + +2:57c + +====2 +1:58c +3:59c + +2:59c + +====2 +1:68c +3:69c + +2:69c + +====2 +1:70c +3:71c + +2:71c + +====2 +1:72c +3:73c + +2:73c + +====2 +1:74c +3:75c + +2:75c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:78,95c + <<<<<<< HEAD + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ||||||| a41d209d5 + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + ======= + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + >>>>>>> TEMP_RIGHT_BRANCH +3:78,83c + + String where = " username= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +====2 +1:82a +3:85a +2:98,104c + <<<<<<< HEAD + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ||||||| a41d209d5 + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + ======= +==== +1:84c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:106,107c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + >>>>>>> TEMP_RIGHT_BRANCH +3:87c + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:109c + +====2 +1:88c +3:91c + +2:111c + +====2 +1:96c +3:99c + +2:119c + +====2 +1:98c +3:101c + +2:121c + +====2 +1:106c +3:109c + +2:129c + +====2 +1:121c +3:124c + +2:144c + +====2 +1:137c +3:140c + +2:160c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:164c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:167c + diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..c8f9a445b4 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,190 @@ +==== +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:19,44c + <<<<<<< HEAD + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + ||||||| a41d209d5 + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import 
org.slf4j.LoggerFactory; + ======= +3:18a +====1 +1:40a +2:46c +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====1 +1:45c + import com.alibaba.nacos.core.utils.ApplicationUtils; +2:51c +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:53,89c + >>>>>>> TEMP_RIGHT_BRANCH + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + <<<<<<< HEAD + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + ||||||| a41d209d5 + import com.alibaba.nacos.common.utils.ConvertUtils; + import com.alibaba.nacos.common.utils.StringUtils; + import com.alibaba.nacos.config.server.monitor.MetricsMonitor; + import com.alibaba.nacos.config.server.utils.ConfigExecutor; + import com.alibaba.nacos.config.server.utils.PropertyUtil; + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.zaxxer.hikari.HikariDataSource; + ======= + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====1 +1:55,58c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +2:97a +3:49a +====1 +1:86,87c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +2:124a +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:157,159c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====1 +1:130c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +2:167c +3:119c + 
.build(EnvUtil.getEnvironment(), (dataSource) -> { +====1 +1:193c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:230c +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:196c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +2:233c +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====1 +1:204,217c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String defaultValue) { + return null == value ? defaultValue : value; + } + +2:240a +3:192a +====1 +1:272c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:295c +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====1 +1:275c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +2:298c +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..3c95b4eb48 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,215 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:30a +3:33a +2:42,49c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + + ======= +====1 +1:32a +2:52,53c +3:36,37c + import java.util.Collections; + import java.util.List; +====2 +1:35a +3:40a +2:57c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:68c +3:51c + private 
PermissionsRepository permissionsRepository; +==== +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:71,140c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role= ? 
"; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(role)) { + params = Collections.singletonList(role); + } else { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:151,152c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:163,167c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..b2b75323b7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,283 @@ +==== +1:21c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +2:21,31c + <<<<<<< HEAD + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + ======= +3:20a +====1 +1:22a +2:33c +3:22c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; +==== +1:24a +2:36c + >>>>>>> TEMP_RIGHT_BRANCH +3:25,28c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import 
com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:39c +3:31c + import org.springframework.data.domain.PageRequest; +====2 +1:31a +3:33a +2:42,48c + <<<<<<< HEAD + ||||||| a41d209d5 + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + ======= +==== +1:35a +2:53,54c + import java.util.Collections; + >>>>>>> TEMP_RIGHT_BRANCH +3:38c + import java.util.Collections; +====1 +1:36a +2:56c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:57a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:69c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,80c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +==== +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:85,135c + <<<<<<< HEAD + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + ||||||| a41d209d5 + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select 
role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + ======= + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { + where = " 1=1 "; + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:141c +3:82c + * @param role role string value. +====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:146c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:155,156c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:162c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:166,168c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +==== +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:173,185c + <<<<<<< HEAD + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); + ||||||| a41d209d5 + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; + ======= + String sql = "SELECT role FROM roles WHERE role like '%' ? 
'%'"; + List users = this.jt.queryForList(sql, new String[] {role}, String.class); + return users; + >>>>>>> TEMP_RIGHT_BRANCH +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:187a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..44c1365407 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,3017 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import 
com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. + */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + 
configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? 
AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? 
AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? 
AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? 
AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) 
"; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? "); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? 
and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? 
"); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? "); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? 
and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? 
and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? )" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? 
)" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? )" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? 
) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? 
"; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; +2:1406,1434c + <<<<<<< HEAD + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + ||||||| a41d209d5 + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + ======= + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; +3:1406,1413c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) "; +====1 +1:2292c + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); +2:1437c +3:1416c + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); +====1 +1:2295c + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); +2:1440c +3:1419c + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); +====2 +1:2297a +3:1421a +2:1443c + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1457c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1462,1467c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1472,1473c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? 
AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1478,1479c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1484,1485c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1539c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1556,1560c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1565c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1614,1617c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1619,1623c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1626c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? "); + paramList.add(appName); +2:1629c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1632,1640c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1731,1732c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_HistoryController.java.txt new file mode 100644 index 0000000000..b506cbd739 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_HistoryController.java.txt @@ -0,0 +1,120 @@ +====1 +1:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +2:30a +3:30a +====2 +1:42c +3:39c + +2:39c + +====2 +1:45c +3:42c + +2:42c + +====1 +1:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +2:46,50c +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. 
+====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:70c + +====1 +1:75c + * Query the detailed configuration history informations. +2:72,75c +3:72,75c + * Query the detailed configuration history information. + * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,86c + <<<<<<< HEAD + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ||||||| a41d209d5 + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { + ======= + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { + >>>>>>> TEMP_RIGHT_BRANCH +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +====2 +1:81a +3:80a +2:89,93c + <<<<<<< HEAD + + ||||||| a41d209d5 + + ======= +==== +1:82a +2:95,107c + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:82,93c + /** + * Query previous config history information. 
+ * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..1d2e6e10d3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_MergeDatumService.java.txt @@ -0,0 +1,43 @@ +====1 +1:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:30,32c +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +2:110c +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====1 +1:117c + if (ApplicationUtils.getStandaloneMode()) { +2:118c +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); +2:167,177c + <<<<<<< HEAD + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); + ||||||| a41d209d5 + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); + ======= + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +3:167c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); +====2 +1:168a +3:169a +2:180c + >>>>>>> TEMP_RIGHT_BRANCH diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..5df60ac501 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,47 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:31c + import com.alibaba.nacos.core.utils.InetUtils; +2:39c +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:55c + public boolean process(AbstractDelayTask task) { +2:63c +3:56c + public boolean process(NacosTask task) { +====1 +1:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +2:95c +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====1 +1:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), 
+2:109c +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..9dcd6442e3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,67 @@ +==== +1:19a +2:20,26c + <<<<<<< HEAD + import com.alibaba.nacos.common.task.AbstractDelayTask; + ||||||| a41d209d5 + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; + ======= + import com.alibaba.nacos.common.task.NacosTask; +3:20c + import com.alibaba.nacos.common.task.NacosTask; +==== +1:21c + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:28c + >>>>>>> TEMP_RIGHT_BRANCH +3:21a +====1 +1:22a +2:30c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +2:35,36c +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:49c + public boolean process(AbstractDelayTask task) { +2:57c +3:50c + public boolean process(NacosTask task) { +====1 +1:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:84c +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +2:86c +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====1 +1:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:90c +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:100c +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====1 +1:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +2:108c +3:101c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_application.properties.txt new file mode 100644 index 0000000000..1578795f12 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_application.properties.txt @@ -0,0 +1,90 @@ +====1 +1:40,41c + # db.user=nacos + # db.password=nacos +2:40,41c +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====1 +1:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +2:112c +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,215c + <<<<<<< HEAD + + + #nacos.datasource.type=MYSQL + # + 
#nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + + ||||||| a41d209d5 + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + 
#nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..99b5611a8e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/intellimerge/diff_pom.xml.txt @@ -0,0 +1,169 @@ +====1 +1:25c + 1.4.0-SNAPSHOT +2:25c +3:25c + 1.4.1-SNAPSHOT +====1 +1:39c + nacos-all-1.4.0-SNAPSHOT +2:39c +3:39c + nacos-all-1.4.1-SNAPSHOT +====1 +1:129c + 2.1.16.RELEASE +2:129c +3:129c + 2.1.17.RELEASE +====1 +1:131c + 2.6 +2:130a +3:130a +====1 +1:133c + 2.2 +2:132c +3:132c + 2.6 +====1 +1:144c + 1.7.17 +2:142a +3:142a +====1 +1:170a +2:169,175c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:285,287c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:315,321c + <<<<<<< HEAD + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** + ||||||| a41d209d5 + **/istio/model/**,**/nacos/test/** + ======= + **/istio/model/**,**/consistency/entity/**,**/nacos/test/** + >>>>>>> TEMP_RIGHT_BRANCH +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====1 +1:341a +2:356c +3:350c + /console-ui/** +====1 +1:553a +2:569,587c +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====1 +1:581a +2:616c +3:610c + sys +====1 +1:688a +2:724,728c +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====1 +1:712,717c + + commons-lang + commons-lang + ${commons-lang.version} + + +2:751a +3:745a +====1 +1:817,822c + + com.ning + async-http-client + ${async-http-client.version} + + +2:850a +3:844a +====1 +1:1027a +2:1056,1093c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_AsyncNotifyService.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_AsyncNotifyService.java.txt new file mode 100644 index 0000000000..b0d839171d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_AsyncNotifyService.java.txt @@ -0,0 +1,74 @@ +====3 +1:35,36c +2:35,36c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:35,36c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====1 +1:107c + +2:107c +3:107c + +====3 +1:130c +2:130c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +3:130c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_UNHEALTH, +==== +1:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + 
header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +2:136,138c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, + String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:136,137c + header.addParam(NotifyService.NOTIFY_HEADER_LAST_MODIFIED, String.valueOf(task.getLastModified())); + header.addParam(NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====1 +1:157c + +2:158c +3:157c + +====3 +1:171c +2:172c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +3:171c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_OK, delayed, +====3 +1:177c +2:178c + task.getLastModified(), InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +3:177c + task.getLastModified(), InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_ERROR, delayed, +====3 +1:199c +2:200c + InetUtils.getSelfIp(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +3:199c + InetUtils.getSelfIP(), ConfigTraceService.NOTIFY_EVENT_EXCEPTION, delayed, task.target); +====3 +1:262c +2:263c + this.url = MessageFormat.format(URL_PATTERN, target, ApplicationUtils.getContextPath(), dataId, group); +3:262c + this.url = MessageFormat.format(URL_PATTERN, target, EnvUtil.getContextPath(), dataId, group); +====3 +1:265c +2:266c + .format(URL_PATTERN_TENANT, target, ApplicationUtils.getContextPath(), dataId, group, tenant); +3:265c + .format(URL_PATTERN_TENANT, target, EnvUtil.getContextPath(), dataId, group, tenant); +====1 +1:312c + } +\ No newline at end of file +2:313c +3:312c + } diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_ConfigController.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ConfigController.java.txt new file mode 100644 index 0000000000..b8fee64e3d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ConfigController.java.txt @@ -0,0 +1,183 @@ +====3 +1:18a +2:18a +3:19c + import com.alibaba.nacos.api.config.ConfigType; +====3 +1:22a +2:22a +3:24c + import com.alibaba.nacos.common.model.RestResultUtils; +====1 +1:34a +2:35c +3:37c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +====1 +1:38c + import com.alibaba.nacos.config.server.model.event.ConfigDataChangeEvent; +2:38a +3:40a +====3 +1:45a +2:45a +3:48c + import com.alibaba.nacos.common.utils.NamespaceUtil; +====3 +1:48c +2:48c + import com.alibaba.nacos.core.utils.InetUtils; +3:51c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:93,94c +2:93,94c + private static final String NAMESPACE_PUBLIC_KEY = "public"; + +3:95a +====3 +1:137a +2:137a +3:139,142c + //check type + if (!ConfigType.isValidType(type)) { + type = ConfigType.getDefaultType().getType(); + } +====3 +1:178c +2:178c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:183c + .logPersistenceEvent(dataId, group, tenant, requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:199c +2:199c + tenant = processTenant(tenant); +3:204c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:283c +2:283c + return ResultBuilder.buildSuccessResult(true); +3:288c + return RestResultUtils.success(true); +====3 +1:472c +2:472c + tenant = processTenant(tenant); +3:477c + tenant = NamespaceUtil.processNamespaceParameter(tenant); +====3 +1:527c +2:527c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); 
+3:532c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:530,534c +2:530,534c + if (StringUtils.isNotBlank(namespace)) { + if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); + } +3:535,538c + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { + failedData.put("succCount", 0); + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:535a +2:535a +3:540c + +====3 +1:548c +2:548c + return ResultBuilder.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +3:553c + return RestResultUtils.buildResult(ResultCodeEnum.METADATA_ILLEGAL, failedData); +====3 +1:560c +2:560c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +3:565c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_VALIDATION_FAILED, failedData); +====3 +1:584c +2:584c + return ResultBuilder.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +3:589c + return RestResultUtils.buildResult(ResultCodeEnum.PARSING_DATA_FAILED, failedData); +====3 +1:588c +2:588c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:593c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:601c +2:601c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:606c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:604c +2:604c + return ResultBuilder.buildSuccessResult("导入成功", saveResult); +3:609c + return RestResultUtils.success("导入成功", saveResult); +====3 +1:628c +2:628c + return ResultBuilder.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +3:633c + return RestResultUtils.buildResult(ResultCodeEnum.NO_SELECTED_CONFIG, failedData); +====3 +1:631,634c +2:631,634c + + if (NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(namespace)) { + namespace = ""; + } else if (persistService.tenantInfoCountByTenantId(namespace) <= 0) { +3:636,638c + + namespace = NamespaceUtil.processNamespaceParameter(namespace); + if (StringUtils.isNotBlank(namespace) && persistService.tenantInfoCountByTenantId(namespace) <= 0) { +====3 +1:636c +2:636c + return ResultBuilder.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +3:640c + return RestResultUtils.buildResult(ResultCodeEnum.NAMESPACE_NOT_EXIST, failedData); +====3 +1:650c +2:650c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:654c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:674c +2:674c + return ResultBuilder.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +3:678c + return RestResultUtils.buildResult(ResultCodeEnum.DATA_EMPTY, failedData); +====3 +1:687c +2:687c + requestIpApp, time.getTime(), InetUtils.getSelfIp(), +3:691c + requestIpApp, time.getTime(), InetUtils.getSelfIP(), +====3 +1:690,697c +2:690,697c + return ResultBuilder.buildSuccessResult("Clone Completed Successfully", saveResult); + } + + private String processTenant(String tenant) { + if (StringUtils.isEmpty(tenant) || NAMESPACE_PUBLIC_KEY.equalsIgnoreCase(tenant)) { + return ""; + } + return tenant; +3:694c + return RestResultUtils.success("Clone Completed Successfully", saveResult); diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_DiskUtils.java.txt 
b/src/python/merge_conflict_analysis_diffs/111/spork/diff_DiskUtils.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_EmbeddedRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_EmbeddedRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..c5d1158c4b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_EmbeddedRolePersistServiceImpl.java.txt @@ -0,0 +1,163 @@ +====1 +1:21c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +2:20a +3:20a +====1 +1:23a +2:23c +3:23c + import com.alibaba.nacos.config.server.service.repository.embedded.EmbeddedStoragePersistServiceImpl; +====3 +1:30a +2:30a +3:31c + import java.util.Collections; +====2 +1:43c +3:44c + +2:43c + +====2 +1:46c +3:47c + +2:46c + +====2 +1:49c +3:50c + +2:49c + +====2 +1:51c +3:52c + +2:51c + +====2 +1:53c +3:54c + +2:53c + +====2 +1:56c +3:57c + +2:56c + +====2 +1:58c +3:59c + +2:58c + +====2 +1:68c +3:69c + +2:68c + +====2 +1:70c +3:71c + +2:70c + +====2 +1:72c +3:73c + +2:72c + +====2 +1:74c +3:75c + +2:74c + +==== +1:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +2:77,80c + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { +3:78,83c + + String where = " username= ? "; + List params = new ArrayList<>(); + if (StringUtils.isNotBlank(username)) { + params = Collections.singletonList(username); + } else { +==== +1:83,84c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +2:83,84c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, +3:86,87c + + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, +====2 +1:86c +3:89c + +2:86c + +====2 +1:88c +3:91c + +2:88c + +====2 +1:96c +3:99c + +2:96c + +====2 +1:98c +3:101c + +2:98c + +====2 +1:106c +3:109c + +2:106c + +====2 +1:121c +3:124c + +2:121c + +====2 +1:137c +3:140c + +2:137c + +====2 +1:141c +3:144c + List users = databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); +2:141c + List users = databaseOperate.queryMany(sql, new String[]{"%" + role + "%"}, String.class); +====2 +1:144c +3:147c + +2:144c + diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalDataSourceServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalDataSourceServiceImpl.java.txt new file mode 100644 index 0000000000..0492051512 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalDataSourceServiceImpl.java.txt @@ -0,0 +1,150 @@ +====1 +1:19,39c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; + + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import javax.sql.DataSource; + + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; 
+ import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + +2:18a +3:18a +====3 +1:40a +2:19a +3:20c + import com.alibaba.nacos.common.utils.IPUtil; +====2 +1:43c +3:23c + import com.alibaba.nacos.config.server.utils.ConfigExecutor; +2:21a +====3 +1:45c +2:23c + import com.alibaba.nacos.core.utils.ApplicationUtils; +3:25c + import com.alibaba.nacos.sys.env.EnvUtil; +==== +1:46a +2:25,42c + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.regex.Matcher; + import java.util.regex.Pattern; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +3:27,41c + import org.springframework.dao.DataAccessException; + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.transaction.support.TransactionTemplate; + + import javax.sql.DataSource; + import java.io.IOException; + import java.util.ArrayList; + import java.util.List; + import java.util.concurrent.TimeUnit; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.utils.LogUtil.DEFAULT_LOG; + import static com.alibaba.nacos.config.server.utils.LogUtil.FATAL_LOG; +====3 +1:55,58c +2:51,54c + private static final Logger LOGGER = LoggerFactory.getLogger(ExternalDataSourceServiceImpl.class); + + private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver"; + +3:49a +====3 +1:86,87c +2:82,83c + private static Pattern ipPattern = Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}"); + +3:76a +====1 +1:120,122c + + ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +2:116,118c +3:109,111c + + // ConfigExecutor.scheduleConfigTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS); + // ConfigExecutor.scheduleConfigTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS); +====3 +1:130c +2:126c + .build(ApplicationUtils.getEnvironment(), (dataSource) -> { +3:119c + .build(EnvUtil.getEnvironment(), (dataSource) -> { +====3 +1:193c +2:189c + return "DOWN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:182c + return "DOWN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:196c +2:192c + return "WARN:" + getIpFromUrl(dataSourceList.get(i).getJdbcUrl()); +3:185c + return "WARN:" + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl()); +====3 +1:204,217c +2:200,213c + private String getIpFromUrl(String url) { + + Matcher m = ipPattern.matcher(url); + if (m.find()) { + return m.group(); + } + + return ""; + } + + static String defaultIfNull(String value, String 
defaultValue) { + return null == value ? defaultValue : value; + } + +3:192a +====3 +1:272c +2:268c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:247c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); +====3 +1:275c +2:271c + getIpFromUrl(dataSourceList.get(i).getJdbcUrl())); +3:250c + IPUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl())); diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalPermissionPersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalPermissionPersistServiceImpl.java.txt new file mode 100644 index 0000000000..5424bf2de0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalPermissionPersistServiceImpl.java.txt @@ -0,0 +1,128 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.PermissionsEntity; + import com.alibaba.nacos.config.server.modules.entity.QPermissionsEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.PermissionsMapStruct; + import com.alibaba.nacos.config.server.modules.repository.PermissionsRepository; +====1 +1:27,28c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:31,35c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +2:29a +3:34,40c + import javax.annotation.PostConstruct; + import java.util.ArrayList; + import java.util.Collections; + import java.util.List; + + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.PERMISSION_ROW_MAPPER; + +====1 +1:46,53c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:40c +3:51c + private PermissionsRepository permissionsRepository; +====1 +1:56,83c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from permissions where "; + String sqlFetchRows = "select role,resource,action from permissions where "; + + String where = " role='" + role + "' "; + + if (StringUtils.isBlank(role)) { + where = " 1=1 "; + } + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, PERMISSION_ROW_MAPPER); + + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new 
ArrayList<>()); + } + + return pageInfo; + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:43,50c +3:54,61c + org.springframework.data.domain.Page sPage = permissionsRepository + .findAll(QPermissionsEntity.permissionsEntity.role.eq(role), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(PermissionsMapStruct.INSTANCE.convertPermissionInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:94,102c + + String sql = "INSERT into permissions (role, resource, action) VALUES (?, ?, ?)"; + + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:61,62c +3:72,73c + + permissionsRepository.save(new PermissionsEntity(role, resource, action)); +====1 +1:113,120c + + String sql = "DELETE from permissions WHERE role=? and resource=? and action=?"; + try { + jt.update(sql, role, resource, action); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:73,77c +3:84,88c + + QPermissionsEntity qPermissions = QPermissionsEntity.permissionsEntity; + permissionsRepository.findOne( + qPermissions.role.eq(role).and(qPermissions.resource.eq(resource)).and(qPermissions.action.eq(action))) + .ifPresent(p -> permissionsRepository.delete(p)); diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalRolePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalRolePersistServiceImpl.java.txt new file mode 100644 index 0000000000..17328923af --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalRolePersistServiceImpl.java.txt @@ -0,0 +1,204 @@ +==== +1:21,24c + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; +2:21,24c + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +3:21,28c + import com.alibaba.nacos.config.server.service.repository.PaginationHelper; + import com.alibaba.nacos.config.server.service.repository.extrnal.ExternalStoragePersistServiceImpl; + import com.alibaba.nacos.config.server.utils.LogUtil; + import org.apache.commons.lang3.StringUtils; + import com.alibaba.nacos.config.server.modules.entity.QRolesEntity; + import com.alibaba.nacos.config.server.modules.entity.RolesEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.RoleInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.RolesRepository; +====1 +1:27,29c + import org.springframework.jdbc.CannotGetJdbcConnectionException; + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.RowMapper; +2:27c +3:31c + import org.springframework.data.domain.PageRequest; +==== +1:32,35c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import 
java.util.ArrayList; +2:29a +3:34,38c + import javax.annotation.PostConstruct; + import java.sql.ResultSet; + import java.sql.SQLException; + import java.util.ArrayList; + import java.util.Collections; +====1 +1:36a +2:31c +3:40c + import java.util.stream.Collectors; +====1 +1:38c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.ROLE_INFO_ROW_MAPPER; +2:32a +3:41a +====1 +1:50,57c + private ExternalStoragePersistServiceImpl persistService; + + private JdbcTemplate jt; + + @PostConstruct + protected void init() { + jt = persistService.getJdbcTemplate(); + } +2:44c +3:53c + private RolesRepository rolesRepository; +====1 +1:61,81c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from (select distinct role from roles) roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " 1=1 "; + + try { + Page pageInfo = helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + if (pageInfo == null) { + pageInfo = new Page<>(); + pageInfo.setTotalCount(0); + pageInfo.setPageItems(new ArrayList<>()); + } + return pageInfo; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:48,55c +3:57,64c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:86,104c + PaginationHelper helper = persistService.createPaginationHelper(); + + String sqlCountRows = "select count(*) from roles where "; + String sqlFetchRows = "select role,username from roles where "; + + String where = " username='" + username + "' "; + + if (StringUtils.isBlank(username)) { + where = " 1=1 "; + } + + try { + return helper + .fetchPage(sqlCountRows + where, sqlFetchRows + where, new ArrayList().toArray(), pageNo, + pageSize, ROLE_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:60,67c +3:69,76c + org.springframework.data.domain.Page sPage = rolesRepository + .findAll(QRolesEntity.rolesEntity.username.eq(username), PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(RoleInfoMapStruct.INSTANCE.convertRoleInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:110c + * @param role role string value. +2:73c +3:82c + * @param role role string value. 
+====1 +1:115,122c + String sql = "INSERT into roles (role, username) VALUES (?, ?)"; + + try { + jt.update(sql, role, userName); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:78c +3:87c + rolesRepository.save(new RolesEntity(userName, role)); +====1 +1:131,137c + String sql = "DELETE from roles WHERE role=?"; + try { + jt.update(sql, role); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:87,88c +3:96,97c + Iterable iterable = rolesRepository.findAll(QRolesEntity.rolesEntity.role.eq(role)); + rolesRepository.deleteAll(iterable); +====1 +1:143c + * @param role role string value. +2:94c +3:103c + * @param role role string value. +====1 +1:147,153c + String sql = "DELETE from roles WHERE role=? and username=?"; + try { + jt.update(sql, role, username); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:98,100c +3:107,109c + QRolesEntity qRoles = QRolesEntity.rolesEntity; + rolesRepository.findOne(qRoles.role.eq(role).and(qRoles.username.eq(username))) + .ifPresent(s -> rolesRepository.delete(s)); +====1 +1:158,160c + String sql = "SELECT role FROM roles WHERE role like '%' ? '%'"; + List users = this.jt.queryForList(sql, new String[]{role}, String.class); + return users; +2:105,107c +3:114,116c + List rolesEntities = (List) rolesRepository + .findAll(QRolesEntity.rolesEntity.role.like(role)); + return rolesEntities.stream().map(s -> s.getRole()).collect(Collectors.toList()); +====1 +1:163,172c + private static final class RoleInfoRowMapper implements RowMapper { + + @Override + public RoleInfo mapRow(ResultSet rs, int rowNum) throws SQLException { + RoleInfo roleInfo = new RoleInfo(); + roleInfo.setRole(rs.getString("role")); + roleInfo.setUsername(rs.getString("username")); + return roleInfo; + } + } +2:109a +3:118a diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalStoragePersistServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalStoragePersistServiceImpl.java.txt new file mode 100644 index 0000000000..d4cc7360ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_ExternalStoragePersistServiceImpl.java.txt @@ -0,0 +1,2999 @@ +====1 +1:41,42c + import com.alibaba.nacos.config.server.service.datasource.DataSourceService; + import com.alibaba.nacos.config.server.service.datasource.DynamicDataSource; +2:41,74c +3:41,74c + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.ConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.HisConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoAggrEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoBetaEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigInfoTagEntity; + import com.alibaba.nacos.config.server.modules.entity.QConfigTagsRelationEntity; + import com.alibaba.nacos.config.server.modules.entity.QHisConfigInfoEntity; + import 
com.alibaba.nacos.config.server.modules.entity.QTenantInfoEntity; + import com.alibaba.nacos.config.server.modules.entity.TenantInfoEntity; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAdvanceInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigAllInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigHistoryInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4BetaMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfo4TagMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoAggrMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoBetaWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoChangedMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoEntityMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoTagWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.ConfigInfoWrapperMapStruct; + import com.alibaba.nacos.config.server.modules.mapstruct.TenantInfoMapStruct; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoAggrRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoBetaRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigInfoTagRepository; + import com.alibaba.nacos.config.server.modules.repository.ConfigTagsRelationRepository; + import com.alibaba.nacos.config.server.modules.repository.HisConfigInfoRepository; + import com.alibaba.nacos.config.server.modules.repository.TenantInfoRepository; +====1 +1:48c + import com.google.common.collect.Lists; +2:80,81c +3:80,81c + import com.querydsl.core.BooleanBuilder; + import lombok.extern.slf4j.Slf4j; +====1 +1:50a +2:84,85c +3:84,85c + import org.springframework.beans.BeanUtils; + import org.springframework.beans.factory.annotation.Autowired; +====1 +1:55c + import org.springframework.dao.IncorrectResultSizeDataAccessException; +2:90,92c +3:90,92c + import org.springframework.data.domain.PageRequest; + import org.springframework.data.domain.Sort; + import org.springframework.data.jpa.domain.Specification; +====1 +1:57,61c + import org.springframework.jdbc.core.JdbcTemplate; + import org.springframework.jdbc.core.PreparedStatementCreator; + import org.springframework.jdbc.core.PreparedStatementSetter; + import org.springframework.jdbc.support.GeneratedKeyHolder; + import org.springframework.jdbc.support.KeyHolder; +2:93a +3:93a +====1 +1:70c + import javax.annotation.PostConstruct; +2:102,105c +3:102,105c + import javax.persistence.criteria.CriteriaBuilder; + import javax.persistence.criteria.CriteriaQuery; + import javax.persistence.criteria.Predicate; + import javax.persistence.criteria.Root; +====1 +1:72,75c + import java.sql.Connection; + import java.sql.PreparedStatement; + import java.sql.SQLException; + import java.sql.Statement; +2:106a +3:106a +====1 +1:81a +2:113c +3:113c + import java.util.stream.Collectors; +====1 +1:83,97c + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ADVANCE_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_ALL_INFO_ROW_MAPPER; + import static 
com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4BETA_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO4TAG_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_AGGR_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BASE_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_CHANGED_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_INFO_WRAPPER_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.CONFIG_KEY_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_DETAIL_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.HISTORY_LIST_ROW_MAPPER; + import static com.alibaba.nacos.config.server.service.repository.RowMapperManager.TENANT_INFO_ROW_MAPPER; +2:114a +3:114a +====1 +1:104a +2:122c +3:122c + @Slf4j +====1 +1:110c + private DataSourceService dataSourceService; +2:128,129c +3:128,129c + @Autowired + private ConfigInfoRepository configInfoRepository; +====1 +1:112c + private static final String SQL_FIND_ALL_CONFIG_INFO = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,c_schema from config_info"; +2:131,132c +3:131,132c + @Autowired + private ConfigInfoBetaRepository configInfoBetaRepository; +====1 +1:114c + private static final String SQL_TENANT_INFO_COUNT_BY_TENANT_ID = "select count(1) from tenant_info where tenant_id = ?"; +2:134,135c +3:134,135c + @Autowired + private ConfigInfoTagRepository configInfoTagRepository; +====1 +1:116c + private static final String SQL_FIND_CONFIG_INFO_BY_IDS = "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5 FROM config_info WHERE "; +2:137,138c +3:137,138c + @Autowired + private ConfigTagsRelationRepository configTagsRelationRepository; +====1 +1:118c + private static final String SQL_DELETE_CONFIG_INFO_BY_IDS = "DELETE FROM config_info WHERE "; +2:140,141c +3:140,141c + @Autowired + private HisConfigInfoRepository hisConfigInfoRepository; +====1 +1:120c + private static final String PATTERN_STR = "*"; +2:143,144c +3:143,144c + @Autowired + private TenantInfoRepository tenantInfoRepository; +====1 +1:122c + private static final int QUERY_LIMIT_SIZE = 50; +2:146,147c +3:146,147c + @Autowired + private ConfigInfoAggrRepository configInfoAggrRepository; +====1 +1:124,126c + protected JdbcTemplate jt; + + protected TransactionTemplate tjt; +2:149,150c +3:149,150c + @Autowired + private TransactionTemplate tjt; +====1 +1:133,171c + /** + * init datasource. 
+ */ + @PostConstruct + public void init() { + dataSourceService = DynamicDataSource.getInstance().getDataSource(); + + jt = getJdbcTemplate(); + tjt = getTransactionTemplate(); + } + + public boolean checkMasterWritable() { + return dataSourceService.checkMasterWritable(); + } + + public void setBasicDataSourceService(DataSourceService dataSourceService) { + this.dataSourceService = dataSourceService; + } + + public synchronized void reload() throws IOException { + this.dataSourceService.reload(); + } + + /** + * For unit testing. + */ + public JdbcTemplate getJdbcTemplate() { + return this.dataSourceService.getJdbcTemplate(); + } + + public TransactionTemplate getTransactionTemplate() { + return this.dataSourceService.getTransactionTemplate(); + } + + @SuppressWarnings("checkstyle:AbbreviationAsWordInName") + public String getCurrentDBUrl() { + return this.dataSourceService.getCurrentDbUrl(); + } + +2:156a +3:156a +====1 +1:174c + return new ExternalStoragePaginationHelperImpl(jt); +2:159c +3:159c + return null; +====1 +1:182,191c + boolean result = tjt.execute(status -> { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:167,183c +3:167,183c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + long configId = addConfigInfoAtomic(-1, srcIp, srcUser, configInfo, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("config_tags"); + addConfigTagsRelation(configId, configTags, configInfo.getDataId(), configInfoEntity.getGroupId(), + configInfoEntity.getTenantId()); + insertConfigHistoryAtomic(0, configInfo, srcIp, srcUser, time, "I"); + + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; + } + return Boolean.TRUE; +====1 +1:193c + return Boolean.TRUE; +2:184a +3:184a +====1 +1:202c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:193c +3:193c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:204,207c + jt.update("INSERT INTO config_info_beta(data_id,group_id,tenant_id,app_name,content,md5,beta_ips,src_ip," + + "src_user,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, appNameTmp, configInfo.getContent(), md5, betaIps, srcIp, srcUser, + time, time); +2:195,207c +3:195,207c + ConfigInfoBetaEntity configInfoBeta = new ConfigInfoBetaEntity(); + configInfoBeta.setDataId(configInfo.getDataId()); + configInfoBeta.setGroupId(configInfo.getGroup()); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setBetaIps(betaIps); + configInfoBeta.setMd5(md5); + configInfoBeta.setGmtCreate(time); + configInfoBeta.setGmtModified(time); + configInfoBeta.setSrcUser(srcUser); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setTenantId(tenantTmp); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:209c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:209c +3:209c + log.error("[db-error] " + e.toString(), e); +====1 +1:220c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:219a +3:219a +====1 +1:222,226c + jt.update( + "INSERT INTO config_info_tag(data_id,group_id,tenant_id,tag_id,app_name,content,md5,src_ip,src_user," + + "gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?,?,?,?,?)", configInfo.getDataId(), + configInfo.getGroup(), tenantTmp, tagTmp, appNameTmp, configInfo.getContent(), md5, srcIp, srcUser, + time, time); +2:221,234c +3:221,234c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoTagEntity configInfoTag = new ConfigInfoTagEntity(); + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:228c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:236c +3:236c + log.error("[db-error] " + e.toString(), e); +====1 +1:236,254c + boolean result = tjt.execute(status -> { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + /* + If the appName passed by the user is not empty, use the persistent user's appName, + otherwise use db; when emptying appName, you need to pass an empty string + */ + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, 
configAdvanceInfo); + String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // delete all tags and then recreate + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); +2:244,268c +3:244,268c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo oldConfigInfo = findConfigInfo(configInfo.getDataId(), configInfo.getGroup(), + configInfo.getTenant()); + String appNameTmp = oldConfigInfo.getAppName(); + // 用户传过来的appName不为空,则用持久化用户的appName,否则用db的;清空appName的时候需要传空串 + if (configInfo.getAppName() == null) { + configInfo.setAppName(appNameTmp); + } + configInfo.setId(oldConfigInfo.getId()); + updateConfigInfoAtomic(configInfo, srcIp, srcUser, time, configAdvanceInfo); + String configTags = + configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + if (configTags != null) { + // 删除所有tag,然后再重新创建 + removeTagByIdAtomic(oldConfigInfo.getId()); + addConfigTagsRelation(oldConfigInfo.getId(), configTags, configInfo.getDataId(), + configInfo.getGroup(), configInfo.getTenant()); + } + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:256,259c + insertConfigHistoryAtomic(oldConfigInfo.getId(), oldConfigInfo, srcIp, srcUser, time, "U"); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:270c +3:270c + return Boolean.TRUE; +====1 +1:261c + return Boolean.TRUE; +2:271a +3:271a +====1 +1:268c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); +2:277a +3:277a +====1 +1:270,275c + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + try { + jt.update( + "UPDATE config_info_beta SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5, srcIp, srcUser, + time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp); +2:279,300c +3:279,300c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(configInfo.getDataId())) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(configInfo.getDataId())); + } + if (StringUtils.isNotBlank(configInfo.getGroup())) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(configInfo.getGroup())); + } + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenantTmp)); + } + ConfigInfoBetaEntity configInfoBeta = configInfoBetaRepository.findOne(booleanBuilder).orElse(null); + try { + String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? 
StringUtils.EMPTY : configInfo.getAppName(); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoBeta.setAppName(appNameTmp); + configInfoBeta.setContent(configInfo.getContent()); + configInfoBeta.setMd5(md5); + configInfoBeta.setSrcIp(srcIp); + configInfoBeta.setSrcUser(srcUser); + configInfoBetaRepository.save(configInfoBeta); +====1 +1:277c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:302c +3:302c + log.error("[db-error] " + e.toString(), e); +====1 +1:288,293c + try { + String md5 = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + jt.update( + "UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE " + + "data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", configInfo.getContent(), md5, + srcIp, srcUser, time, appNameTmp, configInfo.getDataId(), configInfo.getGroup(), tenantTmp, tagTmp); +2:313,331c +3:313,331c + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + ConfigInfoTagEntity configInfoTag = configInfoTagRepository.findOne( + qConfigInfoTag.dataId.eq(configInfo.getDataId()).and(qConfigInfoTag.groupId.eq(configInfo.getGroup())) + .and(qConfigInfoTag.tenantId.eq(tenantTmp)).and(qConfigInfoTag.tagId.eq(tagTmp))) + .orElse(new ConfigInfoTagEntity()); + try { + configInfoTag.setDataId(configInfo.getDataId()); + configInfoTag.setGroupId(configInfo.getGroup()); + configInfoTag.setTenantId(tenantTmp); + configInfoTag.setTagId(tag); + configInfoTag.setAppName(appNameTmp); + configInfoTag.setContent(configInfo.getContent()); + String md5 = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + configInfoTag.setMd5(md5); + configInfoTag.setGmtCreate(time); + configInfoTag.setGmtModified(time); + configInfoTag.setSrcUser(srcUser); + configInfoTag.setSrcIp(srcIp); + configInfoTagRepository.save(configInfoTag); +====1 +1:295c + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:333c +3:333c + log.error("[db-error] " + e.toString(), e); +====1 +1:323,330c + try { + jt.update( + "UPDATE config_info SET md5 = ? WHERE data_id=? AND group_id=? AND tenant_id=? AND gmt_modified=?", + md5, dataId, group, tenantTmp, lastTime); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:361,378c +3:361,378c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (lastTime != null) { + booleanBuilder.and(qConfigInfo.gmtModified.eq(lastTime)); + } + configInfoRepository.findOne(booleanBuilder).ifPresent(config -> { + config.setMd5(md5); + configInfoRepository.save(config); + }); +====1 +1:416,421c + tjt.execute(status -> { + try { + ConfigInfo configInfo = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo != null) { + jt.update("DELETE FROM config_info_beta WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, + group, tenantTmp); +2:464,474c +3:464,474c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + ConfigInfo4Beta configInfo4Beta = findConfigInfo4Beta(dataId, group, tenant); + if (configInfo4Beta != null) { + configInfoBetaRepository.deleteById(configInfo4Beta.getId()); + } + } catch (CannotGetJdbcConnectionException e) { + log.error("[db-error] " + e.toString(), e); + throw e; +====1 +1:423,425c + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:476c +3:476c + return Boolean.TRUE; +====1 +1:427c + return Boolean.TRUE; +2:477a +3:477a +====1 +1:439,442c + String select = "SELECT content FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + String insert = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + String update = "UPDATE config_info_aggr SET content = ? , gmt_modified = ? WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND datum_id = ?"; + +2:489,502c +3:489,502c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } +====1 +1:445,446c + String dbContent = jt + .queryForObject(select, new Object[] {dataId, group, tenantTmp, datumId}, String.class); +2:505c +3:505c + ConfigInfoAggrEntity result = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); +====1 +1:448c + if (dbContent != null && dbContent.equals(content)) { +2:507c +3:507c + if (result.getContent() != null && result.getContent().equals(content)) { +====1 +1:451c + return jt.update(update, content, now, dataId, group, tenantTmp, datumId) > 0; +2:510,513c +3:510,513c + result.setContent(content); + result.setGmtModified(now); + configInfoAggrRepository.save(result); + return true; +====1 +1:454c + return jt.update(insert, dataId, group, tenantTmp, datumId, appNameTmp, content, now) > 0; +2:516,526c +3:516,526c + ConfigInfoAggrEntity configInfoAggrEntity = new ConfigInfoAggrEntity(); + configInfoAggrEntity.setDataId(dataId); + configInfoAggrEntity.setGroupId(group); + configInfoAggrEntity.setDatumId(datumId); + configInfoAggrEntity.setContent(content); + configInfoAggrEntity.setGmtModified(now); + configInfoAggrEntity.setAppName(appNameTmp); + configInfoAggrEntity.setTenantId(tenantTmp); + configInfoAggrRepository.save(configInfoAggrEntity); + return true; + +====1 +1:465,466c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; +2:536a +3:536a +====1 +1:468,482c + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index++, tenantTmp); + ps.setString(index, datumId); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:537a +3:537a +====1 +1:487,502c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "DELETE FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=?"; + + try { + this.jt.update(sql, new PreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps) throws SQLException { + int index = 1; + ps.setString(index++, dataId); + ps.setString(index++, group); + ps.setString(index, tenantTmp); + } + }); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:542,545c +3:542,545c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); +====1 +1:503a +2:547,553c +3:547,553c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenant)); + } + configInfoAggrRepository.findOne(booleanBuilder).ifPresent(aggr -> configInfoAggrRepository.delete(aggr)); +====1 +1:509,523c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final StringBuilder datumString = new StringBuilder(); + for (String datum : datumList) { + datumString.append("'").append(datum).append("',"); + } + datumString.deleteCharAt(datumString.length() - 1); + final String sql = + "delete from config_info_aggr where data_id=? and group_id=? and tenant_id=? and datum_id in (" + + datumString.toString() + ")"; + try { + jt.update(sql, dataId, group, tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:558a +3:558a +====1 +1:529,536c + String sql = "delete from his_config_info where gmt_modified < ? 
limit ?"; + PaginationHelper helper = createPaginationHelper(); + try { + helper.updateLimit(sql, new Object[] {startTime, limitSize}); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:564,567c +3:564,567c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository + .findAll(qHisConfigInfo.gmtModified.lt(startTime), PageRequest.of(0, limitSize)); + hisConfigInfoRepository.deleteAll(iterable); +====1 +1:541,542c + String sql = "SELECT COUNT(*) FROM his_config_info WHERE gmt_modified < ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {startTime}); +2:572,573c +3:572,573c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Long result = hisConfigInfoRepository.count(qHisConfigInfo.gmtModified.lt(startTime)); +====1 +1:551c + String sql = "SELECT max(id) FROM config_info"; +2:581a +3:581a +====1 +1:553c + return jt.queryForObject(sql, Long.class); +2:583,584c +3:583,584c + //TODO 关系型特性查询 + return configInfoRepository.findConfigMaxId(); +====1 +1:591,617c + try { + Boolean isReplaceOk = tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus status) { + try { + String appNameTmp = appName == null ? "" : appName; + removeAggrConfigInfo(dataId, group, tenant); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "INSERT INTO config_info_aggr(data_id, group_id, tenant_id, datum_id, app_name, content, gmt_modified) VALUES(?,?,?,?,?,?,?) "; + for (Map.Entry datumEntry : datumMap.entrySet()) { + jt.update(sql, dataId, group, tenantTmp, datumEntry.getKey(), appNameTmp, + datumEntry.getValue(), new Timestamp(System.currentTimeMillis())); + } + } catch (Throwable e) { + throw new TransactionSystemException("error in addAggrConfigInfo"); + } + return Boolean.TRUE; + } + }); + if (isReplaceOk == null) { + return false; + } + return isReplaceOk; + } catch (TransactionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + return false; + } +2:622c +3:622c + return true; +====1 +1:624,636c + String sql = "SELECT DISTINCT data_id, group_id FROM config_info"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:629c +3:629c + return null; +====1 +1:641,651c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,beta_ips FROM config_info_beta WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO4BETA_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:634,647c +3:634,647c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoBetaEntity qConfigInfoBeta = QConfigInfoBetaEntity.configInfoBetaEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoBeta.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoBeta.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoBeta.tenantId.eq(tenant)); + } + ConfigInfoBetaEntity configInfoBetaEntity = configInfoBetaRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoBeta data null")); + return ConfigInfo4BetaMapStruct.INSTANCE.convertConfigInfo4Beta(configInfoBetaEntity); +====1 +1:659,668c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,tag_id,app_name,content FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", + new Object[] {dataId, group, tenantTmp, tagTmp}, CONFIG_INFO4TAG_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:655,671c +3:655,671c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + ConfigInfoTagEntity result = configInfoTagRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfoTag data null")); + return ConfigInfo4TagMapStruct.INSTANCE.convertConfigInfo4Tag(result); +====1 +1:674,684c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=? AND app_name=?", + new Object[] {dataId, group, tenantTmp, appName}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:677c +3:677c + return null; +====1 +1:690,733c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(group); + paramList.add(tenantTmp); + + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and group_id=? and tenant_id=? 
"); + if (StringUtils.isNotBlank(configTags)) { + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.group_id=? and a.tenant_id=? "); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sql.append(", "); + } + sql.append("?"); + paramList.add(tagArr[i]); + } + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return this.jt.queryForObject(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:683c +3:683c + return null; +====1 +1:739,748c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,content FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, StringUtils.EMPTY}, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:689,695c +3:689,695c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + return configInfoRepository.findOne(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group))).map(s -> { + ConfigInfoBase configInfoBase = new ConfigInfoBase(); + BeanUtils.copyProperties(s, configInfoBase); + configInfoBase.setGroup(s.getGroupId()); + return configInfoBase; + }).orElse(null); +====1 +1:753,762c + try { + return this.jt + .queryForObject("SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE ID=?", + new Object[] {id}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:700,701c +3:700,701c + + return null; +====1 +1:767,776c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5,type FROM config_info WHERE data_id=? AND group_id=? AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null. 
+ return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:706,712c +3:706,712c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:777a +2:714,718c +3:714,718c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity result = configInfoRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfo(result); +====1 +1:783,792c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:724,725c +3:724,725c + + return null; +====1 +1:798,807c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? and app_name=?", + new Object[] {dataId, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:731,732c +3:731,732c + + return null; +====1 +1:813,864c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where data_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where data_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(dataId); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? "); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.data_id=? and a.tenant_id=? 
"); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:738c +3:738c + return null; +====1 +1:870,871c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); +2:743a +3:743a +====1 +1:874,922c + String sqlCount = "select count(*) from config_info"; + String sql = "select ID,data_id,group_id,tenant_id,app_name,content,type from config_info"; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id"; + sql = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id"; + + where.append(" a.tenant_id=? "); + + if (StringUtils.isNotBlank(dataId)) { + where.append(" and a.data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and a.group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and a.app_name=? "); + paramList.add(appName); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id=? "); + if (StringUtils.isNotBlank(dataId)) { + where.append(" and data_id=? "); + paramList.add(dataId); + } + if (StringUtils.isNotBlank(group)) { + where.append(" and group_id=? "); + paramList.add(group); + } + if (StringUtils.isNotBlank(appName)) { + where.append(" and app_name=? 
"); + paramList.add(appName); + } +2:746,765c +3:746,765c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; + } + + private void buildConfigInfoCommonCondition(BooleanBuilder booleanBuilder, QConfigInfoEntity qConfigInfo, + final String dataId, final String group, final String appName) { + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); +====1 +1:924,929c + try { + return helper.fetchPage(sqlCount + where, sql + where, paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:767,771c +3:767,771c + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(appName)) { + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:935,943c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where data_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where data_id=? and tenant_id=?", + new Object[] {dataId, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:777c +3:777c + return null; +====1 +1:949,958c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, tenantTmp}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:783c +3:783c + return null; +====1 +1:964,973c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=? and app_name =?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? and app_name =?", + new Object[] {group, tenantTmp, appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:789c +3:789c + return null; +====1 +1:979,1032c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + + final String appName = configAdvanceInfo == null ? 
null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder( + "select count(*) from config_info where group_id=? and tenant_id=? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where group_id=? and tenant_id=? "); + List paramList = new ArrayList(); + paramList.add(group); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.group_id=? and a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:795c +3:795c + return null; +====1 +1:1038,1047c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where tenant_id like ? and app_name=?", + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? and app_name=?", + new Object[] {generateLikeArgument(tenantTmp), appName}, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:801c +3:801c + return null; +====1 +1:1053,1104c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + PaginationHelper helper = createPaginationHelper(); + final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); + final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); + StringBuilder sqlCount = new StringBuilder("select count(*) from config_info where tenant_id like ? "); + StringBuilder sql = new StringBuilder( + "select ID,data_id,group_id,tenant_id,app_name,content from config_info where tenant_id like ? "); + List paramList = new ArrayList(); + paramList.add(tenantTmp); + if (StringUtils.isNotBlank(configTags)) { + sqlCount = new StringBuilder( + "select count(*) from config_info a left join config_tags_relation b on a.id=b.id where a.tenant_id=? 
"); + + sql = new StringBuilder( + "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join " + + "config_tags_relation b on a.id=b.id where a.tenant_id=? "); + + sqlCount.append(" and b.tag_name in ("); + sql.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + sqlCount.append(", "); + sql.append(", "); + } + sqlCount.append("?"); + sql.append("?"); + paramList.add(tagArr[i]); + } + sqlCount.append(") "); + sql.append(") "); + + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and a.app_name=? "); + sql.append(" and a.app_name=? "); + paramList.add(appName); + } + } else { + if (StringUtils.isNotBlank(appName)) { + sqlCount.append(" and app_name=? "); + sql.append(" and app_name=? "); + paramList.add(appName); + } + } + + try { + return helper.fetchPage(sqlCount.toString(), sql.toString(), paramList.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:807c +3:807c + return null; +====1 +1:1110,1118c + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage("select count(*) from config_info where group_id=? and tenant_id=?", + "select ID,data_id,group_id,content from config_info where group_id=? and tenant_id=?", + new Object[] {group, StringUtils.EMPTY}, pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:813c +3:813c + return null; +====1 +1:1123,1124c + String sql = " SELECT COUNT(ID) FROM config_info "; + Integer result = jt.queryForObject(sql, Integer.class); +2:818c +3:818c + Long result = configInfoRepository.count(); +====1 +1:1133,1134c + String sql = " SELECT COUNT(ID) FROM config_info where tenant_id like ?"; + Integer result = jt.queryForObject(sql, new Object[] {tenant}, Integer.class); +2:827,828c +3:827,828c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.tenantId.like(tenant)); +====1 +1:1143,1144c + String sql = " SELECT COUNT(ID) FROM config_info_beta "; + Integer result = jt.queryForObject(sql, Integer.class); +2:837c +3:837c + Long result = configInfoBetaRepository.count(); +====1 +1:1153,1154c + String sql = " SELECT COUNT(ID) FROM config_info_tag "; + Integer result = jt.queryForObject(sql, Integer.class); +2:846c +3:846c + Long result = configInfoTagRepository.count(); +====1 +1:1162,1165c + public List getTenantIdList(int page, int pageSize) { + String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '' GROUP BY tenant_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:854,864c +3:854,864c + public List getTenantIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("tenantId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1169,1172c + public List getGroupIdList(int page, int pageSize) { + String sql = "SELECT 
group_id FROM config_info WHERE tenant_id ='' GROUP BY group_id LIMIT ?, ?"; + int from = (page - 1) * pageSize; + return jt.queryForList(sql, String.class, from, pageSize); +2:868,878c +3:868,878c + public List getGroupIdList(int pageNo, int pageSize) { + Specification specification = new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, + CriteriaBuilder criteriaBuilder) { + return query.groupBy(root.get("groupId")).getRestriction(); + } + }; + org.springframework.data.domain.Page page = configInfoRepository + .findAll(specification, PageRequest.of(pageNo, pageSize)); + return page.getContent().stream().map(config -> config.getGroupId()).collect(Collectors.toList()); +====1 +1:1178,1179c + String sql = " SELECT COUNT(ID) FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND tenant_id = ?"; + Integer result = jt.queryForObject(sql, Integer.class, new Object[] {dataId, group, tenantTmp}); +2:884,886c +3:884,886c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Long result = configInfoRepository.count(qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))); +====1 +1:1188,1213c + if (datumIds == null || datumIds.isEmpty()) { + return 0; + } + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder sql = new StringBuilder( + " SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ? and datum_id"); + if (isIn) { + sql.append(" in ("); + } else { + sql.append(" not in ("); + } + for (int i = 0, size = datumIds.size(); i < size; i++) { + if (i > 0) { + sql.append(", "); + } + sql.append("?"); + } + sql.append(")"); + + List objectList = Lists.newArrayList(dataId, group, tenantTmp); + objectList.addAll(datumIds); + Integer result = jt.queryForObject(sql.toString(), Integer.class, objectList.toArray()); + if (result == null) { + throw new IllegalArgumentException("aggrConfigInfoCount error"); + } + return result.intValue(); +2:895c +3:895c + return 0; +====1 +1:1228,1242c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 " + + " FROM ( SELECT id FROM config_info WHERE tenant_id like ? ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, + new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:910c +3:910c + return null; +====1 +1:1247,1282c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String select = " SELECT data_id,group_id,app_name FROM ( " + + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ?, ? 
)" + + " g, config_info t WHERE g.id = t.id "; + + final int totalCount = configInfoCount(tenant); + int pageCount = totalCount / pageSize; + if (totalCount > pageSize * pageCount) { + pageCount++; + } + + if (pageNo > pageCount) { + return null; + } + + final Page page = new Page(); + page.setPageNumber(pageNo); + page.setPagesAvailable(pageCount); + page.setTotalCount(totalCount); + + try { + List result = jt + .query(select, new Object[] {generateLikeArgument(tenantTmp), (pageNo - 1) * pageSize, pageSize}, + // new Object[0], + CONFIG_KEY_ROW_MAPPER); + + for (ConfigKey item : result) { + page.getPageItems().add(item); + } + return page; + } catch (EmptyResultDataAccessException e) { + return page; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:915c +3:915c + return null; +====1 +1:1288,1300c + String sqlCountRows = "SELECT COUNT(*) FROM config_info"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,content,md5" + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) " + + " g, config_info t WHERE g.id = t.id "; + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:921c +3:921c + return null; +====1 +1:1305,1319c + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = " SELECT t.id,type,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info ORDER BY id LIMIT ?,? )" + + " g, config_info t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + + List params = new ArrayList(); + + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, params.toArray(), pageNo, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:926c +3:926c + return null; +====1 +1:1324,1332c + String select = "SELECT id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,type from config_info where id > ? order by id asc limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(select, new Object[] {lastMaxId, 0, pageSize}, 1, pageSize, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:931,940c +3:931,940c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(qConfigInfo.id.gt(lastMaxId), PageRequest.of(0, pageSize, Sort.by(Sort.Order.asc("id")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1337,1349c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_beta"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips " + + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ?,? 
)" + + " g, config_info_beta t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_BETA_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:945,952c +3:945,952c + org.springframework.data.domain.Page sPage = configInfoBetaRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoBetaWrapperMapStruct.INSTANCE.convertConfigInfoBetaWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1354,1366c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_tag"; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified " + + " FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT ?,? ) " + + "g, config_info_tag t WHERE g.id = t.id "; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_TAG_WRAPPER_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:957,964c +3:957,964c + org.springframework.data.domain.Page sPage = configInfoTagRepository + .findAll(null, PageRequest.of(pageNo, pageSize)); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoTagWrapperMapStruct.INSTANCE.convertConfigInfoTagWrapperList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1372,1414c + // assert dataids group not null + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + // if dataids empty return empty list + if (CollectionUtils.isEmpty(dataIds)) { + return Collections.emptyList(); + } + + // Batch query limit + // The number of in is controlled within 100, the shorter the length of the SQL statement, the better + if (subQueryLimit > QUERY_LIMIT_SIZE) { + subQueryLimit = 50; + } + List result = new ArrayList(dataIds.size()); + + String sqlStart = "select data_id, group_id, tenant_id, app_name, content from config_info where group_id = ? and tenant_id = ? and data_id in ("; + String sqlEnd = ")"; + StringBuilder subQuerySql = new StringBuilder(); + + for (int i = 0; i < dataIds.size(); i += subQueryLimit) { + // dataids + List params = new ArrayList( + dataIds.subList(i, i + subQueryLimit < dataIds.size() ? i + subQueryLimit : dataIds.size())); + + for (int j = 0; j < params.size(); j++) { + subQuerySql.append("?"); + if (j != params.size() - 1) { + subQuerySql.append(","); + } + } + + // group + params.add(0, group); + params.add(1, tenantTmp); + + List r = this.jt + .query(sqlStart + subQuerySql.toString() + sqlEnd, params.toArray(), CONFIG_INFO_ROW_MAPPER); + + // assert not null + if (r != null && r.size() > 0) { + result.addAll(r); + } + } + return result; +2:970c +3:970c + return null; +====1 +1:1420,1463c + String tenantTmp = StringUtils.isBlank(tenant) ? 
StringUtils.EMPTY : tenant; + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + if (StringUtils.isBlank(appName)) { + return this.findAllConfigInfo(pageNo, pageSize, tenantTmp); + } else { + return this.findConfigInfoByApp(pageNo, pageSize, tenantTmp, appName); + } + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + where += " and tenant_id like ? "; + params.add(generateLikeArgument(tenantTmp)); + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:976c +3:976c + return null; +====1 +1:1469,1562c + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + for (ConfigKey configInfo : configKeys) { + String dataId = configInfo.getDataId(); + String group = configInfo.getGroup(); + String appName = configInfo.getAppName(); + + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? 
"); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + return helper.fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:982c +3:982c + return null; +====1 +1:1572,1636c + PaginationHelper helper = createPaginationHelper(); + String sqlCountRows = "select count(*) from config_info"; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,app_name,content from config_info"; + StringBuilder where = new StringBuilder(" where "); + List params = new ArrayList(); + params.add(generateLikeArgument(tenantTmp)); + if (StringUtils.isNotBlank(configTags)) { + sqlCountRows = "select count(*) from config_info a left join config_tags_relation b on a.id=b.id "; + sqlFetchRows = "select a.ID,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content from config_info a left join config_tags_relation b on a.id=b.id "; + + where.append(" a.tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and a.data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and a.group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and a.app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and a.content like ? "); + params.add(generateLikeArgument(content)); + } + + where.append(" and b.tag_name in ("); + String[] tagArr = configTags.split(","); + for (int i = 0; i < tagArr.length; i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + params.add(tagArr[i]); + } + where.append(") "); + } else { + where.append(" tenant_id like ? "); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where.append(" and group_id like ? "); + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(appName)) { + where.append(" and app_name = ? "); + params.add(appName); + } + if (!StringUtils.isBlank(content)) { + where.append(" and content like ? 
"); + params.add(generateLikeArgument(content)); + } + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:992,1008c +3:992,1008c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + buildConfigInfoCommonCondition(booleanBuilder, qConfigInfo, dataId, group, appName); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.like(tenant)); + } + if (StringUtils.isNotBlank(content)) { + booleanBuilder.and(qConfigInfo.content.like(content)); + } + org.springframework.data.domain.Page sPage = configInfoRepository + .findAll(booleanBuilder, PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("gmtCreate")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1642,1672c + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group)) { + throw new IOException("invalid param"); + } + + PaginationHelper helper = createPaginationHelper(); + + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select ID,data_id,group_id,tenant_id,content from config_info where "; + String where = " 1=1 and tenant_id='' "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + if (!StringUtils.isBlank(content)) { + where += " and content like ? "; + params.add(generateLikeArgument(content)); + } + + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + CONFIG_INFO_BASE_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1014c +3:1014c + return null; +====1 +1:1678,1691c + String sql = "SELECT id,data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? 
AND datum_id=?"; + + try { + return this.jt + .queryForObject(sql, new Object[] {dataId, group, tenantTmp, datumId}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + // EmptyResultDataAccessException, indicating that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); +2:1020,1026c +3:1020,1026c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + BooleanBuilder booleanBuilder = new BooleanBuilder(); + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoAggr.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoAggr.groupId.eq(group)); +====1 +1:1692a +2:1028,1036c +3:1028,1036c + if (StringUtils.isNotBlank(tenantTmp)) { + booleanBuilder.and(qConfigInfoAggr.tenantId.eq(tenantTmp)); + } + if (StringUtils.isNotBlank(datumId)) { + booleanBuilder.and(qConfigInfoAggr.datumId.eq(datumId)); + } + + ConfigInfoAggrEntity configInfoAggrEntity = configInfoAggrRepository.findOne(booleanBuilder).orElse(null); + return ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggr(configInfoAggrEntity); +====1 +1:1697,1710c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sql = "SELECT data_id,group_id,tenant_id,datum_id,app_name,content FROM config_info_aggr WHERE data_id=? AND group_id=? AND tenant_id=? ORDER BY datum_id"; + + try { + return this.jt.query(sql, new Object[] {dataId, group, tenantTmp}, CONFIG_INFO_AGGR_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1041c +3:1041c + return null; +====1 +1:1717,1730c + String sqlCountRows = "SELECT COUNT(*) FROM config_info_aggr WHERE data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where data_id=? and " + + "group_id=? and tenant_id=? 
order by datum_id limit ?,?"; + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPageLimit(sqlCountRows, new Object[] {dataId, group, tenantTmp}, sqlFetchRows, + new Object[] {dataId, group, tenantTmp, (pageNo - 1) * pageSize, pageSize}, pageNo, pageSize, + CONFIG_INFO_AGGR_ROW_MAPPER); + + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1048,1058c +3:1048,1058c + QConfigInfoAggrEntity qConfigInfoAggr = QConfigInfoAggrEntity.configInfoAggrEntity; + org.springframework.data.domain.Page sPage = configInfoAggrRepository.findAll( + qConfigInfoAggr.dataId.eq(dataId).and(qConfigInfoAggr.groupId.eq(group)) + .and(qConfigInfoAggr.tenantId.eq(tenantTmp)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.by("datumId")))); + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigInfoAggrMapStruct.INSTANCE.convertConfigInfoAggrList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); + return page; +====1 +1:1737,1831c + String sqlCountRows = "select count(*) from config_info_aggr where "; + String sqlFetchRows = "select data_id,group_id,tenant_id,datum_id,app_name,content from config_info_aggr where "; + StringBuilder where = new StringBuilder(" 1=1 "); + // Whitelist, please leave the synchronization condition empty, there is no configuration that meets the conditions + if (configKeys.length == 0 && blacklist == false) { + Page page = new Page(); + page.setTotalCount(0); + return page; + } + PaginationHelper helper = createPaginationHelper(); + List params = new ArrayList(); + boolean isFirst = true; + + for (ConfigKey configInfoAggr : configKeys) { + String dataId = configInfoAggr.getDataId(); + String group = configInfoAggr.getGroup(); + String appName = configInfoAggr.getAppName(); + if (StringUtils.isBlank(dataId) && StringUtils.isBlank(group) && StringUtils.isBlank(appName)) { + break; + } + if (blacklist) { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" and "); + } + + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id not like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" group_id not like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" or "); + } + where.append(" app_name != ? "); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } else { + if (isFirst) { + isFirst = false; + where.append(" and "); + } else { + where.append(" or "); + } + where.append("("); + boolean isFirstSub = true; + if (!StringUtils.isBlank(dataId)) { + where.append(" data_id like ? "); + params.add(generateLikeArgument(dataId)); + isFirstSub = false; + } + if (!StringUtils.isBlank(group)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" group_id like ? "); + params.add(generateLikeArgument(group)); + isFirstSub = false; + } + if (!StringUtils.isBlank(appName)) { + if (!isFirstSub) { + where.append(" and "); + } + where.append(" app_name = ? 
"); + params.add(appName); + isFirstSub = false; + } + where.append(") "); + } + } + + try { + Page result = helper + .fetchPage(sqlCountRows + where.toString(), sqlFetchRows + where.toString(), params.toArray(), + pageNo, pageSize, CONFIG_INFO_AGGR_ROW_MAPPER); + return result; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1065c +3:1065c + return null; +====1 +1:1836,1848c + String sql = "SELECT DISTINCT data_id, group_id, tenant_id FROM config_info_aggr"; + + try { + return jt.query(sql, new Object[] {}, CONFIG_INFO_CHANGED_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1070,1071c +3:1070,1071c + List list = configInfoAggrRepository.findAllAggrGroup(); + return ConfigInfoChangedMapStruct.INSTANCE.convertConfigInfoChangedList(list); +====1 +1:1853,1864c + String sql = "SELECT datum_id FROM config_info_aggr WHERE data_id = ? AND group_id = ? AND content = ? "; + + try { + return this.jt.queryForList(sql, new Object[] {dataId, groupId, content}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1076c +3:1076c + return null; +====1 +1:1869,1877c + try { + List> list = jt.queryForList( + "SELECT data_id, group_id, tenant_id, app_name, content, gmt_modified FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertChangeConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1081,1084c +3:1081,1084c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + Iterable iterable = configInfoRepository + .findAll(qConfigInfo.gmtModified.goe(startTime).and(qConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapperList((List) iterable); +====1 +1:1884,1924c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from config_info where "; + String sqlFetchRows = "select id,data_id,group_id,tenant_id,app_name,content,type,md5,gmt_modified from config_info where "; + String where = " 1=1 "; + List params = new ArrayList(); + + if (!StringUtils.isBlank(dataId)) { + where += " and data_id like ? "; + params.add(generateLikeArgument(dataId)); + } + if (!StringUtils.isBlank(group)) { + where += " and group_id like ? "; + params.add(generateLikeArgument(group)); + } + + if (!StringUtils.isBlank(tenantTmp)) { + where += " and tenant_id = ? "; + params.add(tenantTmp); + } + + if (!StringUtils.isBlank(appName)) { + where += " and app_name = ? "; + params.add(appName); + } + if (startTime != null) { + where += " and gmt_modified >=? "; + params.add(startTime); + } + if (endTime != null) { + where += " and gmt_modified <=? 
"; + params.add(endTime); + } + + PaginationHelper helper = createPaginationHelper(); + try { + return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, + lastMaxId, CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1091c +3:1091c + return null; +====1 +1:1929,1937c + try { + List> list = jt.queryForList( + "SELECT DISTINCT data_id, group_id, tenant_id FROM his_config_info WHERE op_type = 'D' AND gmt_modified >=? AND gmt_modified <= ?", + new Object[] {startTime, endTime}); + return convertDeletedConfig(list); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1096,1100c +3:1096,1100c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + Iterable iterable = hisConfigInfoRepository.findAll( + qHisConfigInfo.opType.eq("D").and(qHisConfigInfo.gmtModified.goe(startTime)) + .and(qHisConfigInfo.gmtModified.loe(endTime))); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList((List) iterable); +====1 +1:1943,1947c + final String appNameTmp = + StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + final String tenantTmp = + StringUtils.isBlank(configInfo.getTenant()) ? StringUtils.EMPTY : configInfo.getTenant(); + +2:1105a +3:1105a +====1 +1:1953,1960c + + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); + + KeyHolder keyHolder = new GeneratedKeyHolder(); + + final String sql = + "INSERT INTO config_info(data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_create," + + "gmt_modified,c_desc,c_use,effect,type,c_schema) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; +2:1111,1120c +3:1111,1120c + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setGmtCreate(time); + configInfoEntity.setGmtModified(time); +====1 +1:1963,1991c + jt.update(new PreparedStatementCreator() { + @Override + public PreparedStatement createPreparedStatement(Connection connection) throws SQLException { + PreparedStatement ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS); + ps.setString(1, configInfo.getDataId()); + ps.setString(2, configInfo.getGroup()); + ps.setString(3, tenantTmp); + ps.setString(4, appNameTmp); + ps.setString(5, configInfo.getContent()); + ps.setString(6, md5Tmp); + ps.setString(7, srcIp); + ps.setString(8, srcUser); + ps.setTimestamp(9, time); + ps.setTimestamp(10, time); + ps.setString(11, desc); + ps.setString(12, use); + ps.setString(13, effect); + ps.setString(14, type); + ps.setString(15, schema); + return ps; + } + }, keyHolder); + Number nu = keyHolder.getKey(); + if (nu == null) { + throw new IllegalArgumentException("insert config_info fail"); + } + return nu.longValue(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); +2:1123,1124c +3:1123,1124c + return configInfoRepository.save(configInfoEntity).getId(); + } catch (Exception e) { +====1 +1:2008,2015c + try { + jt.update( + "INSERT INTO 
config_tags_relation(id,tag_name,tag_type,data_id,group_id,tenant_id) VALUES(?,?,?,?,?,?)", + configId, tagName, null, dataId, group, tenant); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1141,1147c +3:1141,1147c + ConfigTagsRelationEntity configTagsRelation = new ConfigTagsRelationEntity(); + configTagsRelation.setId(configId); + configTagsRelation.setTagName(tagName); + configTagsRelation.setDataId(dataId); + configTagsRelation.setGroupId(group); + configTagsRelation.setTenantId(tenant); + configTagsRelationRepository.save(configTagsRelation); +====1 +1:2020,2025c + try { + jt.update("DELETE FROM config_tags_relation WHERE id=?", id); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1152c +3:1152c + configTagsRelationRepository.findById(id).ifPresent(s -> configTagsRelationRepository.delete(s)); +====1 +1:2030,2040c + String sql = "SELECT tag_name FROM config_tags_relation WHERE tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1157c +3:1157c + return null; +====1 +1:2045,2055c + String sql = "SELECT tag_name FROM config_tags_relation WHERE data_id=? AND group_id=? AND tenant_id = ? "; + try { + return jt.queryForList(sql, new Object[] {dataId, group, tenant}, String.class); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (IncorrectResultSizeDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1162,1176c +3:1162,1176c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigTagsRelationEntity qConfigTagsRelation = QConfigTagsRelationEntity.configTagsRelationEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigTagsRelation.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigTagsRelation.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigTagsRelation.tenantId.eq(tenant)); + } + Iterable iterable = configTagsRelationRepository.findAll(booleanBuilder); + List result = new ArrayList<>(); + iterable.forEach(s -> result.add(s.getTagName())); + return result; +====1 +1:2061,2067c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + jt.update("DELETE FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", dataId, group, + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1182,1187c +3:1182,1187c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); +====1 +1:2068a +2:1189,1190c +3:1189,1190c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + configInfos.forEach(s -> configInfoRepository.delete(s)); +====1 +1:2076,2077c + StringBuilder sql = new StringBuilder(SQL_DELETE_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1198,1200c +3:1198,1200c + if (StringUtils.isBlank(ids)) { + return; + } +====1 +1:2081,2084c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1203a +3:1203a +====1 +1:2087,2093c + sql.append(") "); + try { + jt.update(sql.toString(), paramList.toArray()); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1206,1219c +3:1206,1219c + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + for (Long id : paramList) { + configInfoRepository.deleteById(id); + } + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2099,2106c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String tagTmp = StringUtils.isBlank(tag) ? StringUtils.EMPTY : tag; + try { + jt.update("DELETE FROM config_info_tag WHERE data_id=? AND group_id=? AND tenant_id=? AND tag_id=?", dataId, + group, tenantTmp, tagTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1225,1234c +3:1225,1234c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoTagEntity qConfigInfoTag = QConfigInfoTagEntity.configInfoTagEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfoTag.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfoTag.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfoTag.tenantId.eq(tenant)); +====1 +1:2107a +2:1236,1251c +3:1236,1251c + if (StringUtils.isNotBlank(tag)) { + booleanBuilder.and(qConfigInfoTag.tagId.eq(tag)); + } + tjt.execute(new TransactionCallback() { + @Override + public Boolean doInTransaction(TransactionStatus transactionStatus) { + try { + Iterable configInfoTags = configInfoTagRepository.findAll(booleanBuilder); + configInfoTags.forEach(s -> configInfoTagRepository.delete(s)); + } catch (Exception e) { + transactionStatus.setRollbackOnly(); + throw e; + } + return Boolean.TRUE; + } + }); +====1 +1:2113,2115c + String appNameTmp = StringUtils.isBlank(configInfo.getAppName()) ? StringUtils.EMPTY : configInfo.getAppName(); + String tenantTmp = StringUtils.isBlank(configInfo.getTenant()) ? 
StringUtils.EMPTY : configInfo.getTenant(); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), Constants.ENCODE); +2:1257,1258c +3:1257,1258c + ConfigInfoEntity configInfoEntity = ConfigInfoEntityMapStruct.INSTANCE.convertConfigInfoEntity(configInfo); + final String md5Tmp = MD5Utils.md5Hex(configInfo.getContent(), com.alibaba.nacos.api.common.Constants.ENCODE); +====1 +1:2122,2131c + try { + jt.update("UPDATE config_info SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?," + + "app_name=?,c_desc=?,c_use=?,effect=?,type=?,c_schema=? " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", configInfo.getContent(), md5Tmp, srcIp, srcUser, + time, appNameTmp, desc, use, effect, type, schema, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1265,1272c +3:1265,1272c + configInfoEntity.setMd5(md5Tmp); + configInfoEntity.setCDesc(desc); + configInfoEntity.setCUse(use); + configInfoEntity.setEffect(effect); + configInfoEntity.setType(type); + configInfoEntity.setCSchema(schema); + configInfoEntity.setGmtModified(time); + configInfoRepository.save(configInfoEntity); +====1 +1:2139,2140c + StringBuilder sql = new StringBuilder(SQL_FIND_CONFIG_INFO_BY_IDS); + sql.append("id in ("); +2:1279a +3:1279a +====1 +1:2144,2147c + if (i != 0) { + sql.append(", "); + } + sql.append("?"); +2:1282a +3:1282a +====1 +1:2150,2158c + sql.append(") "); + try { + return this.jt.query(sql.toString(), paramList.toArray(), CONFIG_INFO_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1285,1288c +3:1285,1288c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + List list = (List) configInfoRepository + .findAll(qConfigInfo.id.in(paramList)); + return ConfigInfoMapStruct.INSTANCE.convertConfigInfoList2(list); +====1 +1:2163,2176c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAdvanceInfo configAdvance = this.jt.queryForObject( + "SELECT gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info WHERE data_id=? AND group_id=? 
AND tenant_id=?", + new Object[] {dataId, group, tenantTmp}, CONFIG_ADVANCE_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1293,1314c +3:1293,1314c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAdvanceInfo configAdvance = ConfigAdvanceInfoMapStruct.INSTANCE.convertConfigAdvanceInfo(configInfo); + List configTagList = this.selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2178c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1315a +3:1315a +====1 +1:2180,2185c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1317c +3:1317c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2186a +2:1319c +3:1319c + return configAdvance; +====1 +1:2191,2206c + final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + try { + List configTagList = this.selectTagByConfig(dataId, group, tenant); + ConfigAllInfo configAdvance = this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,md5," + + "gmt_create,gmt_modified,src_user,src_ip,c_desc,c_use,effect,type,c_schema FROM config_info " + + "WHERE data_id=? AND group_id=? 
AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_ALL_INFO_ROW_MAPPER); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); + } +2:1324,1346c +3:1324,1346c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.eq(dataId)); + } + if (StringUtils.isNotBlank(group)) { + booleanBuilder.and(qConfigInfo.groupId.eq(group)); + } + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + ConfigInfoEntity configInfo = configInfoRepository.findOne(booleanBuilder) + .orElseThrow(() -> new RuntimeException("find configInfo data null")); + ConfigAllInfo configAdvance = ConfigAllInfoMapStruct.INSTANCE.convertConfigAllInfo(configInfo); + configAdvance.setGroup(configInfo.getGroupId()); + List configTagList = selectTagByConfig(dataId, group, tenant); + if (configTagList != null && !configTagList.isEmpty()) { + StringBuilder configTagsTmp = new StringBuilder(); + for (String configTag : configTagList) { + if (configTagsTmp.length() == 0) { + configTagsTmp.append(configTag); + } else { + configTagsTmp.append(",").append(configTag); +====1 +1:2208c + configAdvance.setConfigTags(configTagsTmp.toString()); +2:1347a +3:1347a +====1 +1:2210,2215c + return configAdvance; + } catch (EmptyResultDataAccessException e) { // Indicates that the data does not exist, returns null + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; +2:1349c +3:1349c + configAdvance.setConfigTags(configTagsTmp.toString()); +====1 +1:2216a +2:1351c +3:1351c + return configAdvance; +====1 +1:2225,2233c + try { + jt.update( + "INSERT INTO his_config_info (id,data_id,group_id,tenant_id,app_name,content,md5,src_ip,src_user,gmt_modified,op_type) " + + "VALUES(?,?,?,?,?,?,?,?,?,?,?)", id, configInfo.getDataId(), configInfo.getGroup(), + tenantTmp, appNameTmp, configInfo.getContent(), md5Tmp, srcIp, srcUser, time, ops); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1360,1373c +3:1360,1373c + HisConfigInfoEntity hisConfigInfo = new HisConfigInfoEntity(); + hisConfigInfo.setId(id); + hisConfigInfo.setDataId(configInfo.getDataId()); + hisConfigInfo.setGroupId(configInfo.getGroup()); + hisConfigInfo.setAppName(appNameTmp); + hisConfigInfo.setContent(configInfo.getContent()); + hisConfigInfo.setMd5(md5Tmp); + hisConfigInfo.setGmtModified(time); + hisConfigInfo.setSrcUser(srcUser); + hisConfigInfo.setSrcIp(srcIp); + hisConfigInfo.setOpType(ops); + hisConfigInfo.setTenantId(tenantTmp); + hisConfigInfo.setGmtCreate(time); + hisConfigInfoRepository.save(hisConfigInfo); +====1 +1:2239,2255c + PaginationHelper helper = createPaginationHelper(); + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + String sqlCountRows = "select count(*) from his_config_info where data_id = ? and group_id = ? and tenant_id = ?"; + String sqlFetchRows = + "select nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,gmt_create,gmt_modified from his_config_info " + + "where data_id = ? and group_id = ? and tenant_id = ? 
order by nid desc"; + + Page page = null; + try { + page = helper + .fetchPage(sqlCountRows, sqlFetchRows, new Object[] {dataId, group, tenantTmp}, pageNo, pageSize, + HISTORY_LIST_ROW_MAPPER); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG + .error("[list-config-history] error, dataId:{}, group:{}", new Object[] {dataId, group}, e); + throw e; + } +2:1379,1389c +3:1379,1389c + QHisConfigInfoEntity qHisConfigInfo = QHisConfigInfoEntity.hisConfigInfoEntity; + org.springframework.data.domain.Page sPage = hisConfigInfoRepository.findAll( + qHisConfigInfo.dataId.eq(dataId).and(qHisConfigInfo.groupId.eq(group)) + .and(qHisConfigInfo.tenantId.eq(tenant)), + PageRequest.of(pageNo, pageSize, Sort.by(Sort.Order.desc("nid")))); + + Page page = new Page<>(); + page.setPageNumber(sPage.getNumber()); + page.setPagesAvailable(sPage.getTotalPages()); + page.setPageItems(ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfoList(sPage.getContent())); + page.setTotalCount((int) sPage.getTotalElements()); +====1 +1:2262,2270c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "INSERT INTO app_configdata_relation_subs(data_id,group_id,app_name,gmt_modified) VALUES(?,?,?,?)", + dataId, group, appNameTmp, date); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1395a +3:1395a +====1 +1:2276,2284c + final String appNameTmp = appName == null ? "" : appName; + try { + jt.update( + "UPDATE app_configdata_relation_subs SET gmt_modified=? WHERE data_id=? AND group_id=? AND app_name=?", + time, dataId, group, appNameTmp); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1401c +3:1401c + +==== +1:2289,2300c + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = ?"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {nid}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[list-config-history] error, nid:{}", new Object[] {nid}, e); + throw e; + } + } + + @Override +2:1406,1411c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override +3:1406,1424c + HisConfigInfoEntity hisConfigInfoEntity = hisConfigInfoRepository.findById(nid) + .orElseThrow(() -> new RuntimeException("findById hisConfigInfo data null nid=" + nid)); + return ConfigHistoryInfoMapStruct.INSTANCE.convertConfigHistoryInfo(hisConfigInfoEntity); + } + + @Override + public ConfigHistoryInfo detailPreviousConfigHistory(Long id) { + String sqlFetchRows = "SELECT nid,data_id,group_id,tenant_id,app_name,content,md5,src_user,src_ip,op_type,gmt_create,gmt_modified FROM his_config_info WHERE nid = (select max(nid) from his_config_info where id = ?) 
"; + try { + ConfigHistoryInfo historyInfo = jt + .queryForObject(sqlFetchRows, new Object[] {id}, HISTORY_DETAIL_ROW_MAPPER); + return historyInfo; + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[detail-previous-config-history] error, id:{}", new Object[] {id}, e); + throw e; + } + } + + @Override +====1 +1:2303,2310c + try { + jt.update( + "INSERT INTO tenant_info(kp,tenant_id,tenant_name,tenant_desc,create_source,gmt_create,gmt_modified) VALUES(?,?,?,?,?,?,?)", + kp, tenantId, tenantName, tenantDesc, createResoure, time, time); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1414,1422c +3:1427,1435c + TenantInfoEntity tenantInfo = new TenantInfoEntity(); + tenantInfo.setKp(kp); + tenantInfo.setTenantId(tenantId); + tenantInfo.setTenantName(tenantName); + tenantInfo.setTenantDesc(tenantDesc); + tenantInfo.setCreateSource(createResoure); + tenantInfo.setGmtCreate(time); + tenantInfo.setGmtModified(time); + tenantInfoRepository.save(tenantInfo); +====1 +1:2315,2322c + try { + jt.update( + "UPDATE tenant_info SET tenant_name = ?, tenant_desc = ?, gmt_modified= ? WHERE kp=? AND tenant_id=?", + tenantName, tenantDesc, System.currentTimeMillis(), kp, tenantId); + } catch (DataAccessException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1427,1432c +3:1440,1445c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + tenantInfoRepository.findOne(qTenantInfo.kp.eq(kp).and(qTenantInfo.tenantId.eq(tenantId))).ifPresent(s -> { + s.setTenantName(tenantName); + s.setTenantDesc(tenantDesc); + tenantInfoRepository.save(s); + }); +====1 +1:2327,2338c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=?"; + try { + return this.jt.query(sql, new Object[] {kp}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1437,1438c +3:1450,1451c + List list = tenantInfoRepository.findByKp(kp); + return TenantInfoMapStruct.INSTANCE.convertTenantInfoList(list); +====1 +1:2343,2354c + String sql = "SELECT tenant_id,tenant_name,tenant_desc FROM tenant_info WHERE kp=? AND tenant_id=?"; + try { + return jt.queryForObject(sql, new Object[] {kp, tenantId}, TENANT_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } catch (EmptyResultDataAccessException e) { + return null; + } catch (Exception e) { + LogUtil.FATAL_LOG.error("[db-other-error]" + e.getMessage(), e); + throw new RuntimeException(e); + } +2:1443,1444c +3:1456,1457c + TenantInfoEntity tenantInfoEntity = tenantInfoRepository.findByKpAndTenantId(kp, tenantId); + return TenantInfoMapStruct.INSTANCE.convertTenantInfo(tenantInfoEntity); +====1 +1:2359,2364c + try { + jt.update("DELETE FROM tenant_info WHERE kp=? 
AND tenant_id=?", kp, tenantId); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1449,1450c +3:1462,1463c + tenantInfoRepository.findOne(QTenantInfoEntity.tenantInfoEntity.tenantId.eq(tenantId) + .and(QTenantInfoEntity.tenantInfoEntity.kp.eq(kp))).ifPresent(s -> tenantInfoRepository.delete(s)); +====1 +1:2418,2431c + String sqlCountRows = " SELECT COUNT(*) FROM config_info "; + String sqlFetchRows = " SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified FROM " + + "( SELECT id FROM config_info ORDER BY id LIMIT ?,? ) g, config_info t WHERE g.id = t.id"; + PaginationHelper helper = createPaginationHelper(); + try { + Page page = helper + .fetchPageLimit(sqlCountRows, sqlFetchRows, new Object[] {(pageNo - 1) * pageSize, pageSize}, + pageNo, pageSize, CONFIG_INFO_WRAPPER_ROW_MAPPER); + + return page.getPageItems(); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1504c +3:1517c + return null; +====1 +1:2448,2458c + try { + return this.jt.queryForObject( + "SELECT ID,data_id,group_id,tenant_id,app_name,content,type,gmt_modified,md5 FROM config_info " + + "WHERE data_id=? AND group_id=? AND tenant_id=?", new Object[] {dataId, group, tenantTmp}, + CONFIG_INFO_WRAPPER_ROW_MAPPER); + } catch (EmptyResultDataAccessException e) { + return null; + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1521,1525c +3:1534,1538c + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + ConfigInfoEntity result = configInfoRepository.findOne( + qConfigInfo.dataId.eq(dataId).and(qConfigInfo.groupId.eq(group)) + .and(qConfigInfo.tenantId.eq(tenantTmp))).orElse(null); + return ConfigInfoWrapperMapStruct.INSTANCE.convertConfigInfoWrapper(result); +====1 +1:2463,2469c + String sql = String.format("select 1 from %s limit 1", tableName); + try { + jt.queryForObject(sql, Integer.class); + return true; + } catch (Throwable e) { + return false; + } +2:1530c +3:1543c + return true; +====1 +1:2518,2530c + String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; + StringBuilder where = new StringBuilder(" where "); + List paramList = new ArrayList<>(); + if (!CollectionUtils.isEmpty(ids)) { + where.append(" id in ("); + for (int i = 0; i < ids.size(); i++) { + if (i != 0) { + where.append(", "); + } + where.append("?"); + paramList.add(ids.get(i)); + } + where.append(") "); +2:1579,1582c +3:1592,1595c + BooleanBuilder booleanBuilder = new BooleanBuilder(); + QConfigInfoEntity qConfigInfo = QConfigInfoEntity.configInfoEntity; + if (!org.springframework.util.CollectionUtils.isEmpty(ids)) { + booleanBuilder.and(qConfigInfo.id.in(ids)); +====1 +1:2532,2536c + where.append(" tenant_id=? "); + paramList.add(tenantTmp); + if (!StringUtils.isBlank(dataId)) { + where.append(" and data_id like ? "); + paramList.add(generateLikeArgument(dataId)); +2:1584,1588c +3:1597,1601c + if (StringUtils.isNotBlank(tenant)) { + booleanBuilder.and(qConfigInfo.tenantId.eq(tenant)); + } + if (StringUtils.isNotBlank(dataId)) { + booleanBuilder.and(qConfigInfo.dataId.like(dataId)); +====1 +1:2539,2540c + where.append(" and group_id=? "); + paramList.add(group); +2:1591c +3:1604c + booleanBuilder.and(qConfigInfo.groupId.eq(group)); +====1 +1:2543,2544c + where.append(" and app_name=? 
"); + paramList.add(appName); +2:1594c +3:1607c + booleanBuilder.and(qConfigInfo.appName.eq(appName)); +====1 +1:2547,2552c + try { + return this.jt.query(SQL_FIND_ALL_CONFIG_INFO + where, paramList.toArray(), CONFIG_ALL_INFO_ROW_MAPPER); + } catch (CannotGetJdbcConnectionException e) { + LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e); + throw e; + } +2:1597,1605c +3:1610,1618c + Iterable configInfos = configInfoRepository.findAll(booleanBuilder); + List resultList = new ArrayList<>(); + configInfos.forEach(s -> { + ConfigAllInfo configAllInfo = new ConfigAllInfo(); + BeanUtils.copyProperties(s, configAllInfo); + configAllInfo.setGroup(s.getGroupId()); + resultList.add(configAllInfo); + }); + return resultList; +====1 +1:2643,2647c + Integer result = this.jt + .queryForObject(SQL_TENANT_INFO_COUNT_BY_TENANT_ID, new String[] {tenantId}, Integer.class); + if (result == null) { + return 0; + } +2:1696,1697c +3:1709,1710c + QTenantInfoEntity qTenantInfo = QTenantInfoEntity.tenantInfoEntity; + Long result = tenantInfoRepository.count(qTenantInfo.tenantId.eq(tenantId)); diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_HistoryController.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_HistoryController.java.txt new file mode 100644 index 0000000000..f20395adc2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_HistoryController.java.txt @@ -0,0 +1,94 @@ +====3 +1:31,33c +2:31,33c + import javax.servlet.http.HttpServletRequest; + import javax.servlet.http.HttpServletResponse; + +3:30a +====2 +1:42c +3:39c + +2:42c + +====2 +1:45c +3:42c + +2:45c + +====3 +1:49,53c +2:49,53c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +3:46,50c + * @param dataId dataId string value. + * @param group group string value. + * @param tenant tenant string value. + * @param appName appName string value. + * @param pageNo pageNo string value. +====2 +1:60,66c +3:57,63c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +2:60,66c + @RequestParam("group") String group, // + @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant, + @RequestParam(value = "appName", required = false) String appName, + @RequestParam(value = "pageNo", required = false) Integer pageNo, + // + @RequestParam(value = "pageSize", required = false) Integer pageSize, // + ModelMap modelMap) { +====2 +1:73c +3:70c + +2:73c + +====3 +1:75c +2:75c + * Query the detailed configuration history informations. +3:72,75c + * Query the detailed configuration history information. 
+ * + * @param nid history_config_info nid + * @return history config info +==== +1:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +2:78,79c + public ConfigHistoryInfo getConfigHistoryInfo(HttpServletRequest request, HttpServletResponse response, + @RequestParam("nid") Long nid, ModelMap modelMap) { +3:78c + public ConfigHistoryInfo getConfigHistoryInfo(@RequestParam("nid") Long nid) { +==== +1:82c + +2:82c + +3:81,93c + + /** + * Query previous config history information. + * + * @param id config_info id + * @return history config info + * @since 1.4.0 + */ + @GetMapping(value = "/previous") + public ConfigHistoryInfo getPreviousConfigHistoryInfo(@RequestParam("id") Long id) { + return persistService.detailPreviousConfigHistory(id); + } + diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_MergeDatumService.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_MergeDatumService.java.txt new file mode 100644 index 0000000000..154063ccc5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_MergeDatumService.java.txt @@ -0,0 +1,35 @@ +====3 +1:30,31c +2:30,31c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:30,32c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.ApplicationUtils; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:109c +2:109c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIp()); +3:110c + addMergeTask(item.getDataId(), item.getGroup(), item.getTenant(), InetUtils.getSelfIP()); +====3 +1:117c +2:117c + if (ApplicationUtils.getStandaloneMode()) { +3:118c + if (EnvUtil.getStandaloneMode()) { +==== +1:166,168c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +2:166,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIp(), null); + LOGGER.warn( + "[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + + group); +3:167,169c + persistService.removeConfigInfo(dataId, group, tenant, InetUtils.getSelfIP(), null); + LOGGER.warn("[merge-delete] delete config info because no datum. 
dataId=" + dataId + ", groupId=" + + group); diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_MergeTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_MergeTaskProcessor.java.txt new file mode 100644 index 0000000000..f8190d8c70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_MergeTaskProcessor.java.txt @@ -0,0 +1,38 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:31c +2:31c + import com.alibaba.nacos.core.utils.InetUtils; +3:32c + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:55c +2:55c + public boolean process(AbstractDelayTask task) { +3:56c + public boolean process(NacosTask task) { +====3 +1:87c +2:87c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:88c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), +====3 +1:101c +2:101c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIp(), +3:102c + .logPersistenceEvent(dataId, group, tenant, null, time.getTime(), InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_NotifyTaskProcessor.java.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_NotifyTaskProcessor.java.txt new file mode 100644 index 0000000000..b34849e3c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_NotifyTaskProcessor.java.txt @@ -0,0 +1,58 @@ +==== +1:20,21c + import com.alibaba.nacos.config.server.constant.Constants; + import com.alibaba.nacos.common.task.AbstractDelayTask; +2:20c + import com.alibaba.nacos.common.task.AbstractDelayTask; +3:20,21c + import com.alibaba.nacos.common.task.NacosTask; + import com.alibaba.nacos.config.server.constant.Constants; +====1 +1:22a +2:22c +3:23c + import com.alibaba.nacos.config.server.constant.Constants; +====3 +1:27,28c +2:27,28c + import com.alibaba.nacos.core.utils.ApplicationUtils; + import com.alibaba.nacos.core.utils.InetUtils; +3:28,29c + import com.alibaba.nacos.sys.env.EnvUtil; + import com.alibaba.nacos.sys.utils.InetUtils; +====3 +1:49c +2:49c + public boolean process(AbstractDelayTask task) { +3:50c + public boolean process(NacosTask task) { +====3 +1:76c +2:76c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIp()); +3:77c + NotifyService.NOTIFY_HEADER_OP_HANDLE_IP, InetUtils.getSelfIP()); +====3 +1:78c +2:78c + .format(URL_PATTERN, serverIp, ApplicationUtils.getContextPath(), dataId, group); +3:79c + .format(URL_PATTERN, serverIp, EnvUtil.getContextPath(), dataId, group); +====3 +1:82c +2:82c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:83c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:92c +2:92c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:93c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), +====3 +1:100c +2:100c + ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIp(), +3:101c + 
ConfigTraceService.logNotifyEvent(dataId, group, tenant, null, lastModified, InetUtils.getSelfIP(), diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_application.properties.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_application.properties.txt new file mode 100644 index 0000000000..9395a60678 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_application.properties.txt @@ -0,0 +1,85 @@ +====3 +1:40,41c +2:40,41c + # db.user=nacos + # db.password=nacos +3:40,41c + # db.user.0=nacos + # db.password.0=nacos +====3 +1:112c +2:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-fe/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +3:112c + nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/** +==== +1:177a +2:178,210c + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + #nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + +3:178,211c + + + + #nacos.datasource.type=MYSQL + # + #nacos.datasource.relational.dsList[0].url=jdbc:mysql://localhost:3306/nacos-devtest?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC + #nacos.datasource.relational.dsList[0].username=root + #nacos.datasource.relational.dsList[0].password=root + #nacos.datasource.relational.dsList[0].driver-class-name=com.mysql.jdbc.Driver + #nacos.datasource.relational.dsList[0].hikari.connection-timeout=10000 + #nacos.datasource.relational.dsList[0].hikari.idle-timeout=120000 + #nacos.datasource.relational.dsList[0].hikari.max-lifetime=240000 + 
#nacos.datasource.relational.dsList[0].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.cachePrepStmts=true + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSize=250 + #nacos.datasource.relational.dsList[0].hikari.data-source-properties.prepStmtCacheSqlLimit=2048 + #nacos.datasource.relational.dsList[0].hikari.connection-test-query=SELECT 1 FROM dual + # + # + # + #nacos.datasource.relational.dsList[1].url=jdbc:mysql://localhost:3306/nacos-devtest?characterEncoding=utf8&connectTimeout=1000&socketTimeout=3000&autoReconnect=true&useUnicode=true&useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true + #nacos.datasource.relational.dsList[1].username=root + #nacos.datasource.relational.dsList[1].password=root + #nacos.datasource.relational.dsList[1].hikari.connection-test-query=SELECT 1 FROM dual + #nacos.datasource.relational.dsList[1].hikari.connection-timeout=60000 + #nacos.datasource.relational.dsList[1].hikari.maximum-pool-size=20 + #nacos.datasource.relational.dsList[1]-enable=false + # + + ## jpa + spring.data.jpa.repositories.enabled=true + spring.jpa.show-sql=true + + diff --git a/src/python/merge_conflict_analysis_diffs/111/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/111/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..25aac30ad6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/111/spork/diff_pom.xml.txt @@ -0,0 +1,163 @@ +====3 +1:25c +2:25c + 1.4.0-SNAPSHOT +3:25c + 1.4.1-SNAPSHOT +====3 +1:39c +2:39c + nacos-all-1.4.0-SNAPSHOT +3:39c + nacos-all-1.4.1-SNAPSHOT +====3 +1:129c +2:129c + 2.1.16.RELEASE +3:129c + 2.1.17.RELEASE +====3 +1:131c +2:131c + 2.6 +3:130a +====1 +1:133c + 2.2 +2:133c +3:132c + 2.6 +====3 +1:144c +2:144c + 1.7.17 +3:142a +====1 +1:170a +2:171,177c +3:169,175c + 1.3.2.beta1 + 1.3.2.beta1 + 1.3.1.Final + 19.3.0.0 + 4.2.1 + 3.4.1 + 1.18.12 +====1 +1:279a +2:287,289c +3:285,287c + **/com/alibaba/nacos/config/server/modules/entity/*.java + **/com/alibaba/nacos/config/server/modules/mapstruct/*.java + **/com/alibaba/nacos/config/server/configuration/datasource/DynamicDataSource.java +==== +1:307c + **/istio/model/**,**/nacos/test/** +2:317c + **/istio/model/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +3:315c + **/istio/model/**,**/consistency/entity/**,**/nacos/test/**,**/com/alibaba/nacos/config/server/modules/** +====3 +1:341a +2:351a +3:350c + /console-ui/** +====3 +1:553a +2:563a +3:563,581c + + remove-test-data + + + + org.apache.maven.plugins + maven-clean-plugin + + false + + + ${user.home}/nacos/data + + + + + + + +====3 +1:581a +2:591a +3:610c + sys +====3 +1:688a +2:698a +3:718,722c + + ${project.groupId} + nacos-sys + ${project.version} + +====3 +1:712,717c +2:722,727c + + commons-lang + commons-lang + ${commons-lang.version} + + +3:745a +====3 +1:817,822c +2:827,832c + + com.ning + async-http-client + ${async-http-client.version} + + +3:844a +====1 +1:1027a +2:1038,1075c +3:1050,1087c + + + org.mapstruct + mapstruct-jdk8 + ${mapstruct.version} + + + + org.mapstruct + mapstruct-processor + ${mapstruct.version} + + + + org.projectlombok + lombok + true + ${lombok.version} + + + + com.querydsl + querydsl-jpa + ${querydsl.version} + + + + com.zaxxer + HikariCP + ${hikariCP.version} + + + + com.oracle.ojdbc + ojdbc8 + ${ojdbc.version} + + diff --git a/src/python/merge_conflict_analysis_diffs/1120/git_hires_merge/diff_OSMReaderHelper.java.txt 
b/src/python/merge_conflict_analysis_diffs/1120/git_hires_merge/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..818c2c2fc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/git_hires_merge/diff_OSMReaderHelper.java.txt @@ -0,0 +1,50 @@ +====3 +1:46c +2:46c + private long edgeCount; +3:45a +====3 +1:75c +2:75c + } +3:74c + } +====3 +1:78c +2:78c + return edgeCount; +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:86c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:88c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:117c +3:116c + iter.name(name); +====3 +1:131c +2:132c + void cleanup() { +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:142,143c + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..482ec222ec --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort/diff_OSMReaderHelper.java.txt @@ -0,0 +1,57 @@ +====1 +1:46c + private long edgeCount; +2:45a +3:45a +====1 +1:75c + } +2:74c +3:74c + } +====1 +1:78c + return edgeCount; +2:77c +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:85c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:87c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:116c +3:116c + iter.name(name); +====1 +1:131c + void cleanup() { +2:131c +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:141,149c + <<<<<<< HEAD + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; + ||||||| c4079cd49f + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; + ======= + addEdge(wayNodes, flags); + >>>>>>> TEMP_RIGHT_BRANCH +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_adjacent/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_adjacent/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..818c2c2fc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_adjacent/diff_OSMReaderHelper.java.txt @@ -0,0 +1,50 @@ +====3 +1:46c +2:46c + private long edgeCount; +3:45a +====3 +1:75c +2:75c + } +3:74c + } +====3 +1:78c +2:78c + return edgeCount; +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:86c + public 
abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:88c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:117c +3:116c + iter.name(name); +====3 +1:131c +2:132c + void cleanup() { +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:142,143c + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_ignorespace/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_ignorespace/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..bc573adf65 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_ignorespace/diff_OSMReaderHelper.java.txt @@ -0,0 +1,57 @@ +====1 +1:46c + private long edgeCount; +2:45a +3:45a +====3 +1:75c +2:74c + } +3:74c + } +====1 +1:78c + return edgeCount; +2:77c +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:85c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:87c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:116c +3:116c + iter.name(name); +====1 +1:131c + void cleanup() { +2:131c +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:141,149c + <<<<<<< HEAD + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; + ||||||| c4079cd49f + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; + ======= + addEdge(wayNodes, flags); + >>>>>>> TEMP_RIGHT_BRANCH +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_imports/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_imports/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..818c2c2fc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_imports/diff_OSMReaderHelper.java.txt @@ -0,0 +1,50 @@ +====3 +1:46c +2:46c + private long edgeCount; +3:45a +====3 +1:75c +2:75c + } +3:74c + } +====3 +1:78c +2:78c + return edgeCount; +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:86c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:88c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:117c +3:116c + iter.name(name); +====3 +1:131c +2:132c + void cleanup() { +3:131c + void finishedReading() { +==== +1:141,142c + int 
successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:142,143c + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_imports_ignorespace/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_imports_ignorespace/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..818c2c2fc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_ort_imports_ignorespace/diff_OSMReaderHelper.java.txt @@ -0,0 +1,50 @@ +====3 +1:46c +2:46c + private long edgeCount; +3:45a +====3 +1:75c +2:75c + } +3:74c + } +====3 +1:78c +2:78c + return edgeCount; +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:86c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:88c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:117c +3:116c + iter.name(name); +====3 +1:131c +2:132c + void cleanup() { +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:142,143c + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_histogram/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_histogram/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..482ec222ec --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_histogram/diff_OSMReaderHelper.java.txt @@ -0,0 +1,57 @@ +====1 +1:46c + private long edgeCount; +2:45a +3:45a +====1 +1:75c + } +2:74c +3:74c + } +====1 +1:78c + return edgeCount; +2:77c +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:85c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:87c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:116c +3:116c + iter.name(name); +====1 +1:131c + void cleanup() { +2:131c +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:141,149c + <<<<<<< HEAD + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; + ||||||| c4079cd49f + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; + ======= + addEdge(wayNodes, flags); + >>>>>>> TEMP_RIGHT_BRANCH +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_ignorespace/diff_OSMReaderHelper.java.txt 
b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_ignorespace/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..bc573adf65 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_ignorespace/diff_OSMReaderHelper.java.txt @@ -0,0 +1,57 @@ +====1 +1:46c + private long edgeCount; +2:45a +3:45a +====3 +1:75c +2:74c + } +3:74c + } +====1 +1:78c + return edgeCount; +2:77c +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:85c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:87c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:116c +3:116c + iter.name(name); +====1 +1:131c + void cleanup() { +2:131c +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:141,149c + <<<<<<< HEAD + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; + ||||||| c4079cd49f + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; + ======= + addEdge(wayNodes, flags); + >>>>>>> TEMP_RIGHT_BRANCH +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_minimal/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_minimal/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..482ec222ec --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_minimal/diff_OSMReaderHelper.java.txt @@ -0,0 +1,57 @@ +====1 +1:46c + private long edgeCount; +2:45a +3:45a +====1 +1:75c + } +2:74c +3:74c + } +====1 +1:78c + return edgeCount; +2:77c +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:85c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:87c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:116c +3:116c + iter.name(name); +====1 +1:131c + void cleanup() { +2:131c +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:141,149c + <<<<<<< HEAD + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; + ||||||| c4079cd49f + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; + ======= + addEdge(wayNodes, flags); + >>>>>>> TEMP_RIGHT_BRANCH +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_myers/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_myers/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..482ec222ec --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_myers/diff_OSMReaderHelper.java.txt @@ -0,0 
+1,57 @@ +====1 +1:46c + private long edgeCount; +2:45a +3:45a +====1 +1:75c + } +2:74c +3:74c + } +====1 +1:78c + return edgeCount; +2:77c +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:85c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:87c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:116c +3:116c + iter.name(name); +====1 +1:131c + void cleanup() { +2:131c +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:141,149c + <<<<<<< HEAD + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; + ||||||| c4079cd49f + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; + ======= + addEdge(wayNodes, flags); + >>>>>>> TEMP_RIGHT_BRANCH +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_patience/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_patience/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..482ec222ec --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/gitmerge_recursive_patience/diff_OSMReaderHelper.java.txt @@ -0,0 +1,57 @@ +====1 +1:46c + private long edgeCount; +2:45a +3:45a +====1 +1:75c + } +2:74c +3:74c + } +====1 +1:78c + return edgeCount; +2:77c +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:85c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:87c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:116c +3:116c + iter.name(name); +====1 +1:131c + void cleanup() { +2:131c +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:141,149c + <<<<<<< HEAD + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; + ||||||| c4079cd49f + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; + ======= + addEdge(wayNodes, flags); + >>>>>>> TEMP_RIGHT_BRANCH +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/intellimerge/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/intellimerge/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..482ec222ec --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/intellimerge/diff_OSMReaderHelper.java.txt @@ -0,0 +1,57 @@ +====1 +1:46c + private long edgeCount; +2:45a +3:45a +====1 +1:75c + } +2:74c +3:74c + } +====1 +1:78c + return edgeCount; +2:77c +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:85c + public abstract int addEdge(TLongList nodes, int flags, String 
name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:87c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:116c +3:116c + iter.name(name); +====1 +1:131c + void cleanup() { +2:131c +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:141,149c + <<<<<<< HEAD + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; + ||||||| c4079cd49f + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; + ======= + addEdge(wayNodes, flags); + >>>>>>> TEMP_RIGHT_BRANCH +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_AbstractMyBitSetTest.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_AbstractMyBitSetTest.java.txt new file mode 100644 index 0000000000..020c97b3ac --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_AbstractMyBitSetTest.java.txt @@ -0,0 +1,48 @@ +====1 +1:26c + * @author Peter Karich, +2:26c +3:26c + * @author Peter Karich +====3 +1:30c +2:30c + public abstract MyBitSet createBitSet(int no); +3:30c + public abstract GHBitSet createBitSet(int no); +====3 +1:34c +2:34c + MyBitSet bs = createBitSet(100); +3:34c + GHBitSet bs = createBitSet(100); +====3 +1:38c +2:38c + MyBitSet copyBS = createBitSet(10); +3:38c + GHBitSet copyBS = createBitSet(10); +====3 +1:61c +2:61c + MyBitSet bs = createBitSet(100); +3:61c + GHBitSet bs = createBitSet(100); +====3 +1:69c +2:69c + MyBitSet bs = createBitSet(100); +3:69c + GHBitSet bs = createBitSet(100); +====3 +1:80c +2:80c + MyBitSet bs = createBitSet(8); +3:80c + GHBitSet bs = createBitSet(8); +====3 +1:97c +2:97c + MyBitSet bs = createBitSet(100); +3:97c + GHBitSet bs = createBitSet(100); diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_BitUtil.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_BitUtil.java.txt new file mode 100644 index 0000000000..ab9d41dc1e --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_BitUtil.java.txt @@ -0,0 +1,14 @@ +====1 +1:24c + * @author Peter Karich, +2:24c +3:24c + * @author Peter Karich +====3 +1:108a +2:108a +3:109,112c + public static long toLong(int high, int low) { + return ((long) high << 32) | (low & 0xFFFFFFFFL); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GHBitSet.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GHBitSet.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GHTBitSet.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GHTBitSet.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GHUtility.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GHUtility.java.txt new file mode 100644 index 0000000000..6193efa551 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GHUtility.java.txt @@ -0,0 +1,57 @@ +====3 +1:21,22c +2:21,22c + import com.graphhopper.coll.MyBitSet; + import com.graphhopper.coll.MyBitSetImpl; +3:21,22c + import com.graphhopper.coll.GHBitSet; + import 
com.graphhopper.coll.GHBitSetImpl; +====1 +1:41c + * @author Peter Karich, +2:41c +3:41c + * @author Peter Karich +====3 +1:151c +2:151c + final MyBitSetImpl bitset = new MyBitSetImpl(nodes); +3:151c + final GHBitSetImpl bitset = new GHBitSetImpl(nodes); +====3 +1:156c +2:156c + @Override protected MyBitSet createBitSet(int size) { +3:156c + @Override protected GHBitSet createBitSet(int size) { +====3 +1:173c +2:173c + MyBitSet bitset = new MyBitSetImpl(len); +3:173c + GHBitSet bitset = new GHBitSetImpl(len); +====3 +1:249c +2:249c + MyBitSet bitset = new MyBitSetImpl(len); +3:249c + GHBitSet bitset = new GHBitSetImpl(len); +====1 +1:271c + +2:270a +3:270a +====1 +1:328a +2:328,337c +3:328,337c + @Override + public String name() { + throw new UnsupportedOperationException("Not supported yet."); + } + + @Override + public void name(String name) { + throw new UnsupportedOperationException("Not supported yet."); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GraphStorage.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GraphStorage.java.txt new file mode 100644 index 0000000000..937ed2ae51 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_GraphStorage.java.txt @@ -0,0 +1,125 @@ +====3 +1:21,22c +2:21,22c + import com.graphhopper.coll.MyBitSet; + import com.graphhopper.coll.MyBitSetImpl; +3:21,22c + import com.graphhopper.coll.GHBitSet; + import com.graphhopper.coll.GHBitSetImpl; +====1 +1:26a +2:27c +3:27c + import com.graphhopper.search.NameIndex; +====1 +1:55c + protected final int E_NODEA, E_NODEB, E_LINKA, E_LINKB, E_DIST, E_FLAGS, E_GEO; +2:56c +3:56c + protected final int E_NODEA, E_NODEB, E_LINKA, E_LINKB, E_DIST, E_FLAGS, E_NAME, E_GEO; +====3 +1:75c +2:76c + private MyBitSet removedNodes; +3:76c + private GHBitSet removedNodes; +====1 +1:83a +2:85c +3:85c + private NameIndex nameIndex; +====1 +1:92a +2:95c +3:95c + this.nameIndex = new NameIndex(dir); +====1 +1:99a +2:103c +3:103c + E_NAME = nextEdgeEntryIndex(); +====1 +1:145a +2:150c +3:150c + nameIndex.segmentSize(bytes); +====1 +1:160c + geometry.create((long) initBytes); +2:165,166c +3:165,166c + geometry.create(1000); + nameIndex.create(1000); +====1 +1:481a +2:488,499c +3:488,499c + @Override + public String name() { + int nameIndexRef = edges.getInt(edgePointer + E_NAME); + return nameIndex.get(nameIndexRef); + } + + @Override + public void name(String name) { + int nameIndexRef = nameIndex.put(name); + edges.setInt(edgePointer + E_NAME, nameIndexRef); + } + +====1 +1:644a +2:663,674c +3:663,674c + @Override + public String name() { + int nameIndexRef = edges.getInt(edgePointer + E_NAME); + return nameIndex.get(nameIndexRef); + } + + @Override + public void name(String name) { + int nameIndexRef = nameIndex.put(name); + edges.setInt(edgePointer + E_NAME, nameIndexRef); + } + +====3 +1:720c +2:750c + clonedG.removedNodes = removedNodes.copyTo(new MyBitSetImpl()); +3:750c + clonedG.removedNodes = removedNodes.copyTo(new GHBitSetImpl()); +====3 +1:724c +2:754c + private MyBitSet removedNodes() { +3:754c + private GHBitSet removedNodes() { +====3 +1:726c +2:756c + removedNodes = new MyBitSetImpl((int) (nodes.capacity() / 4)); +3:756c + removedNodes = new GHBitSetImpl((int) (nodes.capacity() / 4)); +====3 +1:799c +2:829c + MyBitSet toRemoveSet = new MyBitSetImpl(removeNodeCount); +3:829c + GHBitSet toRemoveSet = new GHBitSetImpl(removeNodeCount); +====3 +1:843c +2:873c + MyBitSet toMoveSet = new MyBitSetImpl(removeNodeCount * 3); +3:873c + GHBitSet 
toMoveSet = new GHBitSetImpl(removeNodeCount * 3); +====1 +1:938a +2:969,970c +3:969,970c + if (!nameIndex.loadExisting()) + throw new IllegalStateException("cannot load name index. corrupt file or directory? " + dir); +====1 +1:983a +2:1016c +3:1016c + nameIndex.flush(); diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_Helper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_Helper.java.txt new file mode 100644 index 0000000000..3b8ec7467c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_Helper.java.txt @@ -0,0 +1,29 @@ +====3 +1:21a +2:21a +3:22c + import com.graphhopper.storage.DataAccess; +====1 +1:43c + * @author Peter Karich, +2:43c +3:44c + * @author Peter Karich +====3 +1:166c +2:166c + if ("car".equals(str)) +3:167c + if (str.isEmpty() || "car".equals(str)) +====3 +1:172c +2:172c + throw new RuntimeException("Not found " + str); +3:173c + throw new RuntimeException("VehicleEncoder not found " + str); +====1 +1:315c + public static final int VERSION_FILE = 6; +2:315c +3:316c + public static final int VERSION_FILE = 7; diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReader.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReader.java.txt new file mode 100644 index 0000000000..06b7a18701 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReader.java.txt @@ -0,0 +1,43 @@ +====1 +1:62c + * @author Peter Karich, +2:62c +3:62c + * @author Peter Karich +====3 +1:244c +2:244c + helper.cleanup(); +3:243a +====3 +1:250,251c +2:250,251c + logger.info("nodes " + n + ", there were " + preparation.subNetworks() + + " sub-networks. removed them => " + (prev - n) +3:249,251c + logger.info("edges: " + graphStorage.getAllEdges().maxId() + + "nodes " + n + ", there were " + preparation.subNetworks() + + " subnetworks. removed them => " + (prev - n) +====3 +1:278c +2:278c + logger.info("creating graph with expected nodes:" + nf(helper.expectedNodes())); +3:278c + logger.info("creating graph. 
Found nodes (pillar+tower):" + nf(helper.expectedNodes()) + ", " + Helper.getMemInfo()); +====3 +1:296,297c +2:296,297c + + " (" + skippedLocations + "), edges:" + nf(helper.edgeCount()) + + " " + Helper.getMemInfo()); +3:296c + + " (" + skippedLocations + ") " + Helper.getMemInfo()); +====3 +1:318c +2:318c + +3:316a +====3 +1:327a +2:327a +3:326c + helper.finishedReading(); diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderHelper.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderHelper.java.txt new file mode 100644 index 0000000000..818c2c2fc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderHelper.java.txt @@ -0,0 +1,50 @@ +====3 +1:46c +2:46c + private long edgeCount; +3:45a +====3 +1:75c +2:75c + } +3:74c + } +====3 +1:78c +2:78c + return edgeCount; +3:77c + return g.getAllEdges().maxId(); +==== +1:86c + public abstract int addEdge(TLongList nodes, int flags); +2:86c + public abstract int addEdge(TLongList nodes, int flags, String name); +3:85c + public abstract void addEdge(TLongList nodes, int flags, String name); +====1 +1:88c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags) { +2:88c +3:87c + int addEdge(int fromIndex, int toIndex, PointList pointList, int flags, String name) { +====1 +1:116a +2:117c +3:116c + iter.name(name); +====3 +1:131c +2:132c + void cleanup() { +3:131c + void finishedReading() { +==== +1:141,142c + int successfullAdded = addEdge(wayNodes, flags); + edgeCount += successfullAdded; +2:142,143c + int successfullAdded = addEdge(wayNodes, flags, (String) outProperties.get("wayName")); + edgeCount += successfullAdded; +3:141c + addEdge(wayNodes, flags, (String) outProperties.get("wayName")); diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderHelperDoubleParse.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderHelperDoubleParse.java.txt new file mode 100644 index 0000000000..73264f02a0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderHelperDoubleParse.java.txt @@ -0,0 +1,88 @@ +====3 +1:21a +2:21a +3:22,23c + import com.graphhopper.coll.LongIntMap; + import com.graphhopper.coll.GHLongIntBTree; +====3 +1:55,57c +2:55,57c + private BigLongIntMap osmIdToIndexMap; + // very slow: private SparseLongLongArray osmIdToIndexMap; + // not applicable as ways introduces the nodes in 'wrong' order: private OSMIDSegmentedMap +3:57c + private LongIntMap osmIdToIndexMap; +====3 +1:69c +2:69c + // TODO check out if we better should use http://en.wikipedia.org/wiki/Segment_tree +3:69,75c + + // Using the correct Map is hard. We need a very memory + // efficient and fast solution for very big data sets! 
+ // very slow: new SparseLongLongArray + // only append and update possible: new OSMIDMap + // not applicable as ways introduces the nodes in 'wrong' order: new OSMIDSegmentedMap + // memory overhead due to hash: +====3 +1:70a +2:70a +3:77c + // osmIdToIndexMap = new MyLongIntBTree(200); +==== +1:107c + public int addEdge(TLongList osmIds, int flags) { +2:107c + public int addEdge(TLongList osmIds, int flags, String name) { +3:114c + public void addEdge(TLongList osmIds, int flags, String name) { +====1 +1:132c + successfullyAdded += addEdge(firstNode, tmpNode, pointList, flags); +2:132c +3:139c + successfullyAdded += addEdge(firstNode, tmpNode, pointList, flags, name); +====1 +1:159c + successfullyAdded += addEdge(firstNode, tmpNode, pointList, flags); +2:159c +3:166c + successfullyAdded += addEdge(firstNode, tmpNode, pointList, flags, name); +====3 +1:166c +2:166c + return successfullyAdded; +3:172a +====3 +1:193a +2:193a +3:200,207c + private void printInfo(String str) { + LoggerFactory.getLogger(getClass()).info("finished " + str + " processing." + + " nodes: " + g.nodes() + ", osmIdMap.size:" + osmIdToIndexMap.size() + + ", osmIdMap:" + osmIdToIndexMap.memoryUsage() + "MB" + + ", osmIdMap.toString:" + osmIdToIndexMap + " " + + Helper.getMemInfo()); + } + +====3 +1:196,197c +2:196,197c + LoggerFactory.getLogger(getClass()).info("finished node processing. osmIdMap:" + + (int) (osmIdToIndexMap.capacity() * (12f + 1) / Helper.MB) + "MB, " + Helper.getMemInfo()); +3:210c + printInfo("node"); +====3 +1:201c +2:201c + void cleanup() { +3:214,216c + void finishedReading() { + osmIdToIndexMap.optimize(); + printInfo("way"); +====3 +1:247c +2:247c + + nf(osmIdToIndexMap.size()) + " (" + nf(osmIdToIndexMap.capacity()) + ") " +3:262c + + nf(osmIdToIndexMap.size()) + " (" + osmIdToIndexMap.memoryUsage() + "MB) " diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderTest.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderTest.java.txt new file mode 100644 index 0000000000..52a85ad586 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_OSMReaderTest.java.txt @@ -0,0 +1,23 @@ +====1 +1:44c + * @author Peter Karich, +2:44c +3:44c + * @author Peter Karich +====1 +1:108a +2:109c +3:109c + assertEquals("street 123", iter.name()); +====1 +1:120c + +2:121c +3:121c + +====1 +1:124c + +2:125c +3:125c + diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_SimpleIntDeque.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_SimpleIntDeque.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_SimpleIntDequeTest.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_SimpleIntDequeTest.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1120/spork/diff_XFirstSearch.java.txt b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_XFirstSearch.java.txt new file mode 100644 index 0000000000..e4a99f7a7d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1120/spork/diff_XFirstSearch.java.txt @@ -0,0 +1,34 @@ +====3 +1:21,22c +2:21,22c + import com.graphhopper.coll.MyBitSet; + import com.graphhopper.coll.MyBitSetImpl; +3:21,22c + import com.graphhopper.coll.GHBitSet; + import com.graphhopper.coll.GHBitSetImpl; +====1 +1:29c + * @author Peter Karich, +2:29c +3:29c + * @author Peter Karich +====3 +1:45,46c +2:45,46c + protected MyBitSet createBitSet(int 
size) { + return new MyBitSetImpl(size); +3:45,46c + protected GHBitSet createBitSet(int size) { + return new GHBitSetImpl(size); +====3 +1:56c +2:56c + MyBitSet visited = createBitSet(g.nodes()); +3:56c + GHBitSet visited = createBitSet(g.nodes()); +====3 +1:95c +2:95c + static class MyHelperIntQueue extends MyIntDeque implements HelperColl { +3:95c + static class MyHelperIntQueue extends SimpleIntDeque implements HelperColl { diff --git a/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..1819967566 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,55 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +====1 +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); +2:257,263c +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..38bdd97c80 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_GraphHopperIT.java.txt @@ -0,0 +1,114 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +====1 +1:1053a +2:1054,1104c +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1106,1108c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1112,1114c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1117c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1125c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1140,1156c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_changelog.txt.txt new file mode 100644 index 0000000000..fa8ed01105 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/git_hires_merge/diff_changelog.txt.txt @@ -0,0 +1,10 @@ +==== +1:18a +2:19,20c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + It is now possible to specify finite u-turn costs for CH preparation, #1671 +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..a0a5039e19 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,126 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +==== +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); +2:257,327c + <<<<<<< HEAD + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + ||||||| 34b0d48a78 + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + ======= + List entriesStrs = new ArrayList<>(); + PrepareContractionHierarchies edgeBasedPCH = null; + PrepareContractionHierarchies nodeBasedPCH = null; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (weightingMatches) { + if (p.isEdgeBased()) { + edgeBasedPCH = p; + } else { + nodeBasedPCH = p; + } + } + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD + ||||||| 34b0d48a78 + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); + ======= + + if (edgeBasedPCH == null && nodeBasedPCH == null) { + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "."); + } + if (map.has(Parameters.Routing.EDGE_BASED)) { + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + if (edgeBased && edgeBasedPCH != null) { + return edgeBasedPCH; + } + if (!edgeBased && nodeBasedPCH != null) { + return nodeBasedPCH; + } + + if (edgeBased) { + throw new IllegalArgumentException("Found a node-based CH preparation for weighting map " + map + ", but requested edge-based CH. " + + "You either need to configure edge-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'false' (was 'true'). all entries: " + entriesStrs); + } else { + throw new IllegalArgumentException("Found an edge-based CH preparation for weighting map " + map + ", but requested node-based CH. " + + "You either need to configure node-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'true' (was 'false'). all entries: " + entriesStrs); + } + } else { + // no edge_based parameter was set, we determine the CH preparation based on what is there (and prefer edge-based + // if we can choose) + return edgeBasedPCH != null ? edgeBasedPCH : nodeBasedPCH; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..e4fc7273b8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_GraphHopperIT.java.txt @@ -0,0 +1,170 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +==== +1:1053a +2:1054,1109c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + <<<<<<< HEAD + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + ||||||| 34b0d48a78 + ======= + assertTrue(rsp.getErrors().toString().contains("Found an edge-based CH preparation")); + >>>>>>> TEMP_RIGHT_BRANCH + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1111,1113c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1117,1119c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1122c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1130c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1145,1161c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_changelog.txt.txt new file mode 100644 index 0000000000..9ba7d938e0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort/diff_changelog.txt.txt @@ -0,0 +1,16 @@ +==== +1:18a +2:19,26c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + <<<<<<< HEAD + It is now possible to specify finite u-turn costs for CH preparation, #1671 + ||||||| 34b0d48a78 + ======= + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..1819967566 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,55 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +====1 +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); +2:257,263c +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? 
map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..38bdd97c80 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_GraphHopperIT.java.txt @@ -0,0 +1,114 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +====1 +1:1053a +2:1054,1104c +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1106,1108c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1112,1114c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1117c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1125c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1140,1156c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_changelog.txt.txt new file mode 100644 index 0000000000..fa8ed01105 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_adjacent/diff_changelog.txt.txt @@ -0,0 +1,10 @@ +==== +1:18a +2:19,20c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + It is now possible to specify finite u-turn costs for CH preparation, #1671 +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..a0a5039e19 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,126 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +==== +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); +2:257,327c + <<<<<<< HEAD + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + ||||||| 34b0d48a78 + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + ======= + List entriesStrs = new ArrayList<>(); + PrepareContractionHierarchies edgeBasedPCH = null; + PrepareContractionHierarchies nodeBasedPCH = null; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (weightingMatches) { + if (p.isEdgeBased()) { + edgeBasedPCH = p; + } else { + nodeBasedPCH = p; + } + } + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD + ||||||| 34b0d48a78 + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); + ======= + + if (edgeBasedPCH == null && nodeBasedPCH == null) { + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "."); + } + if (map.has(Parameters.Routing.EDGE_BASED)) { + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + if (edgeBased && edgeBasedPCH != null) { + return edgeBasedPCH; + } + if (!edgeBased && nodeBasedPCH != null) { + return nodeBasedPCH; + } + + if (edgeBased) { + throw new IllegalArgumentException("Found a node-based CH preparation for weighting map " + map + ", but requested edge-based CH. " + + "You either need to configure edge-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'false' (was 'true'). all entries: " + entriesStrs); + } else { + throw new IllegalArgumentException("Found an edge-based CH preparation for weighting map " + map + ", but requested node-based CH. " + + "You either need to configure node-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'true' (was 'false'). all entries: " + entriesStrs); + } + } else { + // no edge_based parameter was set, we determine the CH preparation based on what is there (and prefer edge-based + // if we can choose) + return edgeBasedPCH != null ? edgeBasedPCH : nodeBasedPCH; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..e4fc7273b8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_GraphHopperIT.java.txt @@ -0,0 +1,170 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +==== +1:1053a +2:1054,1109c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + <<<<<<< HEAD + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + ||||||| 34b0d48a78 + ======= + assertTrue(rsp.getErrors().toString().contains("Found an edge-based CH preparation")); + >>>>>>> TEMP_RIGHT_BRANCH + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1111,1113c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1117,1119c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1122c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1130c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1145,1161c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_changelog.txt.txt new file mode 100644 index 0000000000..9ba7d938e0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_ignorespace/diff_changelog.txt.txt @@ -0,0 +1,16 @@ +==== +1:18a +2:19,26c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + <<<<<<< HEAD + It is now possible to specify finite u-turn costs for CH preparation, #1671 + ||||||| 34b0d48a78 + ======= + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..1819967566 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,55 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +====1 +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); +2:257,263c +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? 
map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..38bdd97c80 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_GraphHopperIT.java.txt @@ -0,0 +1,114 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +====1 +1:1053a +2:1054,1104c +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1106,1108c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1112,1114c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1117c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1125c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1140,1156c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_changelog.txt.txt new file mode 100644 index 0000000000..fa8ed01105 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports/diff_changelog.txt.txt @@ -0,0 +1,10 @@ +==== +1:18a +2:19,20c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + It is now possible to specify finite u-turn costs for CH preparation, #1671 +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..1819967566 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,55 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +====1 +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); +2:257,263c +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..38bdd97c80 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_GraphHopperIT.java.txt @@ -0,0 +1,114 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +====1 +1:1053a +2:1054,1104c +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1106,1108c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1112,1114c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1117c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1125c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1140,1156c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_changelog.txt.txt new file mode 100644 index 0000000000..fa8ed01105 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_ort_imports_ignorespace/diff_changelog.txt.txt @@ -0,0 +1,10 @@ +==== +1:18a +2:19,20c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + It is now possible to specify finite u-turn costs for CH preparation, #1671 +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..a0a5039e19 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,126 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +==== +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); +2:257,327c + <<<<<<< HEAD + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + ||||||| 34b0d48a78 + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + ======= + List entriesStrs = new ArrayList<>(); + PrepareContractionHierarchies edgeBasedPCH = null; + PrepareContractionHierarchies nodeBasedPCH = null; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (weightingMatches) { + if (p.isEdgeBased()) { + edgeBasedPCH = p; + } else { + nodeBasedPCH = p; + } + } + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD + ||||||| 34b0d48a78 + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); + ======= + + if (edgeBasedPCH == null && nodeBasedPCH == null) { + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "."); + } + if (map.has(Parameters.Routing.EDGE_BASED)) { + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + if (edgeBased && edgeBasedPCH != null) { + return edgeBasedPCH; + } + if (!edgeBased && nodeBasedPCH != null) { + return nodeBasedPCH; + } + + if (edgeBased) { + throw new IllegalArgumentException("Found a node-based CH preparation for weighting map " + map + ", but requested edge-based CH. " + + "You either need to configure edge-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'false' (was 'true'). all entries: " + entriesStrs); + } else { + throw new IllegalArgumentException("Found an edge-based CH preparation for weighting map " + map + ", but requested node-based CH. " + + "You either need to configure node-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'true' (was 'false'). all entries: " + entriesStrs); + } + } else { + // no edge_based parameter was set, we determine the CH preparation based on what is there (and prefer edge-based + // if we can choose) + return edgeBasedPCH != null ? edgeBasedPCH : nodeBasedPCH; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..e4fc7273b8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_GraphHopperIT.java.txt @@ -0,0 +1,170 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +==== +1:1053a +2:1054,1109c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + <<<<<<< HEAD + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + ||||||| 34b0d48a78 + ======= + assertTrue(rsp.getErrors().toString().contains("Found an edge-based CH preparation")); + >>>>>>> TEMP_RIGHT_BRANCH + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1111,1113c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1117,1119c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1122c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1130c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1145,1161c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_changelog.txt.txt new file mode 100644 index 0000000000..9ba7d938e0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_histogram/diff_changelog.txt.txt @@ -0,0 +1,16 @@ +==== +1:18a +2:19,26c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + <<<<<<< HEAD + It is now possible to specify finite u-turn costs for CH preparation, #1671 + ||||||| 34b0d48a78 + ======= + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..5bd4442275 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,123 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +==== +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." 
+ hint); +2:257,324c + <<<<<<< HEAD + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + ||||||| 34b0d48a78 + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); + ======= + List entriesStrs = new ArrayList<>(); + PrepareContractionHierarchies edgeBasedPCH = null; + PrepareContractionHierarchies nodeBasedPCH = null; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (weightingMatches) { + if (p.isEdgeBased()) { + edgeBasedPCH = p; + } else { + nodeBasedPCH = p; + } + } + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + if (edgeBasedPCH == null && nodeBasedPCH == null) { + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "."); + } + if (map.has(Parameters.Routing.EDGE_BASED)) { + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + if (edgeBased && edgeBasedPCH != null) { + return edgeBasedPCH; + } + if (!edgeBased && nodeBasedPCH != null) { + return nodeBasedPCH; + } + + if (edgeBased) { + throw new IllegalArgumentException("Found a node-based CH preparation for weighting map " + map + ", but requested edge-based CH. " + + "You either need to configure edge-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'false' (was 'true'). all entries: " + entriesStrs); + } else { + throw new IllegalArgumentException("Found an edge-based CH preparation for weighting map " + map + ", but requested node-based CH. " + + "You either need to configure node-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'true' (was 'false'). all entries: " + entriesStrs); + } + } else { + // no edge_based parameter was set, we determine the CH preparation based on what is there (and prefer edge-based + // if we can choose) + return edgeBasedPCH != null ? edgeBasedPCH : nodeBasedPCH; + >>>>>>> TEMP_RIGHT_BRANCH + } +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..e4fc7273b8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_GraphHopperIT.java.txt @@ -0,0 +1,170 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +==== +1:1053a +2:1054,1109c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + <<<<<<< HEAD + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + ||||||| 34b0d48a78 + ======= + assertTrue(rsp.getErrors().toString().contains("Found an edge-based CH preparation")); + >>>>>>> TEMP_RIGHT_BRANCH + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1111,1113c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1117,1119c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1122c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1130c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1145,1161c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_changelog.txt.txt new file mode 100644 index 0000000000..9ba7d938e0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_ignorespace/diff_changelog.txt.txt @@ -0,0 +1,16 @@ +==== +1:18a +2:19,26c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + <<<<<<< HEAD + It is now possible to specify finite u-turn costs for CH preparation, #1671 + ||||||| 34b0d48a78 + ======= + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..a0a5039e19 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,126 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +==== +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." 
+ hint); +2:257,327c + <<<<<<< HEAD + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + ||||||| 34b0d48a78 + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + ======= + List entriesStrs = new ArrayList<>(); + PrepareContractionHierarchies edgeBasedPCH = null; + PrepareContractionHierarchies nodeBasedPCH = null; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (weightingMatches) { + if (p.isEdgeBased()) { + edgeBasedPCH = p; + } else { + nodeBasedPCH = p; + } + } + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD + ||||||| 34b0d48a78 + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); + ======= + + if (edgeBasedPCH == null && nodeBasedPCH == null) { + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "."); + } + if (map.has(Parameters.Routing.EDGE_BASED)) { + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + if (edgeBased && edgeBasedPCH != null) { + return edgeBasedPCH; + } + if (!edgeBased && nodeBasedPCH != null) { + return nodeBasedPCH; + } + + if (edgeBased) { + throw new IllegalArgumentException("Found a node-based CH preparation for weighting map " + map + ", but requested edge-based CH. " + + "You either need to configure edge-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'false' (was 'true'). all entries: " + entriesStrs); + } else { + throw new IllegalArgumentException("Found an edge-based CH preparation for weighting map " + map + ", but requested node-based CH. " + + "You either need to configure node-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'true' (was 'false'). all entries: " + entriesStrs); + } + } else { + // no edge_based parameter was set, we determine the CH preparation based on what is there (and prefer edge-based + // if we can choose) + return edgeBasedPCH != null ? edgeBasedPCH : nodeBasedPCH; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..e4fc7273b8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_GraphHopperIT.java.txt @@ -0,0 +1,170 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +==== +1:1053a +2:1054,1109c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + <<<<<<< HEAD + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + ||||||| 34b0d48a78 + ======= + assertTrue(rsp.getErrors().toString().contains("Found an edge-based CH preparation")); + >>>>>>> TEMP_RIGHT_BRANCH + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1111,1113c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1117,1119c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1122c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1130c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1145,1161c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_changelog.txt.txt new file mode 100644 index 0000000000..9ba7d938e0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_minimal/diff_changelog.txt.txt @@ -0,0 +1,16 @@ +==== +1:18a +2:19,26c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + <<<<<<< HEAD + It is now possible to specify finite u-turn costs for CH preparation, #1671 + ||||||| 34b0d48a78 + ======= + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..a0a5039e19 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,126 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +==== +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." 
+ hint); +2:257,327c + <<<<<<< HEAD + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + ||||||| 34b0d48a78 + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + ======= + List entriesStrs = new ArrayList<>(); + PrepareContractionHierarchies edgeBasedPCH = null; + PrepareContractionHierarchies nodeBasedPCH = null; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (weightingMatches) { + if (p.isEdgeBased()) { + edgeBasedPCH = p; + } else { + nodeBasedPCH = p; + } + } + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD + ||||||| 34b0d48a78 + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); + ======= + + if (edgeBasedPCH == null && nodeBasedPCH == null) { + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "."); + } + if (map.has(Parameters.Routing.EDGE_BASED)) { + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + if (edgeBased && edgeBasedPCH != null) { + return edgeBasedPCH; + } + if (!edgeBased && nodeBasedPCH != null) { + return nodeBasedPCH; + } + + if (edgeBased) { + throw new IllegalArgumentException("Found a node-based CH preparation for weighting map " + map + ", but requested edge-based CH. " + + "You either need to configure edge-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'false' (was 'true'). all entries: " + entriesStrs); + } else { + throw new IllegalArgumentException("Found an edge-based CH preparation for weighting map " + map + ", but requested node-based CH. " + + "You either need to configure node-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'true' (was 'false'). all entries: " + entriesStrs); + } + } else { + // no edge_based parameter was set, we determine the CH preparation based on what is there (and prefer edge-based + // if we can choose) + return edgeBasedPCH != null ? edgeBasedPCH : nodeBasedPCH; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..e4fc7273b8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_GraphHopperIT.java.txt @@ -0,0 +1,170 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +==== +1:1053a +2:1054,1109c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + <<<<<<< HEAD + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + ||||||| 34b0d48a78 + ======= + assertTrue(rsp.getErrors().toString().contains("Found an edge-based CH preparation")); + >>>>>>> TEMP_RIGHT_BRANCH + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1111,1113c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1117,1119c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1122c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1130c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1145,1161c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_changelog.txt.txt new file mode 100644 index 0000000000..9ba7d938e0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_myers/diff_changelog.txt.txt @@ -0,0 +1,16 @@ +==== +1:18a +2:19,26c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + <<<<<<< HEAD + It is now possible to specify finite u-turn costs for CH preparation, #1671 + ||||||| 34b0d48a78 + ======= + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..a0a5039e19 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,126 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +==== +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." 
+ hint); +2:257,327c + <<<<<<< HEAD + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + ||||||| 34b0d48a78 + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + ======= + List entriesStrs = new ArrayList<>(); + PrepareContractionHierarchies edgeBasedPCH = null; + PrepareContractionHierarchies nodeBasedPCH = null; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (weightingMatches) { + if (p.isEdgeBased()) { + edgeBasedPCH = p; + } else { + nodeBasedPCH = p; + } + } + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD + ||||||| 34b0d48a78 + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); + ======= + + if (edgeBasedPCH == null && nodeBasedPCH == null) { + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "."); + } + if (map.has(Parameters.Routing.EDGE_BASED)) { + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + if (edgeBased && edgeBasedPCH != null) { + return edgeBasedPCH; + } + if (!edgeBased && nodeBasedPCH != null) { + return nodeBasedPCH; + } + + if (edgeBased) { + throw new IllegalArgumentException("Found a node-based CH preparation for weighting map " + map + ", but requested edge-based CH. " + + "You either need to configure edge-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'false' (was 'true'). all entries: " + entriesStrs); + } else { + throw new IllegalArgumentException("Found an edge-based CH preparation for weighting map " + map + ", but requested node-based CH. " + + "You either need to configure node-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'true' (was 'false'). all entries: " + entriesStrs); + } + } else { + // no edge_based parameter was set, we determine the CH preparation based on what is there (and prefer edge-based + // if we can choose) + return edgeBasedPCH != null ? edgeBasedPCH : nodeBasedPCH; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..e4fc7273b8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_GraphHopperIT.java.txt @@ -0,0 +1,170 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +==== +1:1053a +2:1054,1109c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + <<<<<<< HEAD + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + ||||||| 34b0d48a78 + ======= + assertTrue(rsp.getErrors().toString().contains("Found an edge-based CH preparation")); + >>>>>>> TEMP_RIGHT_BRANCH + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1111,1113c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1117,1119c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1122c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1130c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1145,1161c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_changelog.txt.txt new file mode 100644 index 0000000000..9ba7d938e0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/gitmerge_recursive_patience/diff_changelog.txt.txt @@ -0,0 +1,16 @@ +==== +1:18a +2:19,26c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + <<<<<<< HEAD + It is now possible to specify finite u-turn costs for CH preparation, #1671 + ||||||| 34b0d48a78 + ======= + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..a0a5039e19 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,126 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +==== +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); +2:257,327c + <<<<<<< HEAD + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? 
map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + ||||||| 34b0d48a78 + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + ======= + List entriesStrs = new ArrayList<>(); + PrepareContractionHierarchies edgeBasedPCH = null; + PrepareContractionHierarchies nodeBasedPCH = null; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (weightingMatches) { + if (p.isEdgeBased()) { + edgeBasedPCH = p; + } else { + nodeBasedPCH = p; + } + } + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + >>>>>>> TEMP_RIGHT_BRANCH + } + <<<<<<< HEAD + ||||||| 34b0d48a78 + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." + hint); + ======= + + if (edgeBasedPCH == null && nodeBasedPCH == null) { + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "."); + } + if (map.has(Parameters.Routing.EDGE_BASED)) { + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + if (edgeBased && edgeBasedPCH != null) { + return edgeBasedPCH; + } + if (!edgeBased && nodeBasedPCH != null) { + return nodeBasedPCH; + } + + if (edgeBased) { + throw new IllegalArgumentException("Found a node-based CH preparation for weighting map " + map + ", but requested edge-based CH. " + + "You either need to configure edge-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'false' (was 'true'). all entries: " + entriesStrs); + } else { + throw new IllegalArgumentException("Found an edge-based CH preparation for weighting map " + map + ", but requested node-based CH. " + + "You either need to configure node-based CH preparation or set the '" + Parameters.Routing.EDGE_BASED + "' " + + "request parameter to 'true' (was 'false'). all entries: " + entriesStrs); + } + } else { + // no edge_based parameter was set, we determine the CH preparation based on what is there (and prefer edge-based + // if we can choose) + return edgeBasedPCH != null ? edgeBasedPCH : nodeBasedPCH; + } + >>>>>>> TEMP_RIGHT_BRANCH +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? 
map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..aef2810b3b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_GraphHopperIT.java.txt @@ -0,0 +1,220 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +==== +1:1053a +2:1054,1159c + <<<<<<< HEAD + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + + ||||||| 34b0d48a78 + ======= + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). 
+ setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found an edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1161,1163c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1167,1169c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1172c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1180c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1195,1211c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_changelog.txt.txt new file mode 100644 index 0000000000..29c999de42 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/intellimerge/diff_changelog.txt.txt @@ -0,0 +1,17 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + It is now possible to specify finite u-turn costs for CH preparation, #1671 + ||||||| 34b0d48a78 + ======= + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/1177/spork/diff_CHAlgoFactoryDecorator.java.txt b/src/python/merge_conflict_analysis_diffs/1177/spork/diff_CHAlgoFactoryDecorator.java.txt new file mode 100644 index 0000000000..1819967566 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/spork/diff_CHAlgoFactoryDecorator.java.txt @@ -0,0 +1,55 @@ +====1 +1:19a +2:20,21c +3:20,21c + import com.carrotsearch.hppc.IntObjectHashMap; + import com.carrotsearch.hppc.IntObjectMap; +====1 +1:28c + import com.graphhopper.util.Parameters; +2:29a +3:29a +====1 +1:29a +2:31c +3:31c + import com.graphhopper.util.Parameters.Routing; +====1 +1:36a +2:39c +3:39c + import static com.graphhopper.routing.weighting.TurnWeighting.INFINITE_UTURN_COSTS; +====1 +1:223c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +2:226c +3:226c + * "fastest|u_turn_costs=30 or your own weight-calculation type. +====1 +1:254,270c + boolean edgeBased = map.getBool(Parameters.Routing.EDGE_BASED, false); + List entriesStrs = new ArrayList<>(); + boolean weightingMatchesButNotEdgeBased = false; + for (PrepareContractionHierarchies p : getPreparations()) { + boolean weightingMatches = p.getCHProfile().getWeighting().matches(map); + if (p.isEdgeBased() == edgeBased && weightingMatches) + return p; + else if (weightingMatches) + weightingMatchesButNotEdgeBased = true; + + entriesStrs.add(p.getCHProfile().getWeighting() + "|" + (p.getCHProfile().isEdgeBased() ? "edge" : "node")); + } + + String hint = weightingMatchesButNotEdgeBased + ? " The '" + Parameters.Routing.EDGE_BASED + "' url parameter is missing or does not fit the weightings. Its value was: '" + edgeBased + "'" + : ""; + throw new IllegalArgumentException("Cannot find CH RoutingAlgorithmFactory for weighting map " + map + " in entries: " + entriesStrs + "." 
+ hint); +2:257,263c +3:257,263c + Boolean edgeBased = map.has(Routing.EDGE_BASED) ? map.getBool(Routing.EDGE_BASED, false) : null; + Integer uTurnCosts = map.has(Routing.UTURN_COSTS) ? map.getInt(Routing.UTURN_COSTS, INFINITE_UTURN_COSTS) : null; + try { + return PCHSelector.select(getPreparations(), map, edgeBased, uTurnCosts); + } catch (NoSuchCHPreparationException e) { + throw new IllegalArgumentException(e.getMessage()); + } diff --git a/src/python/merge_conflict_analysis_diffs/1177/spork/diff_GraphHopperIT.java.txt b/src/python/merge_conflict_analysis_diffs/1177/spork/diff_GraphHopperIT.java.txt new file mode 100644 index 0000000000..38bdd97c80 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/spork/diff_GraphHopperIT.java.txt @@ -0,0 +1,114 @@ +====1 +1:1025,1026c + // no edge_based parameter -> use node-based (because its faster) + assertMoscowNodeBased(tmpHopper, "none", true); +2:1025,1026c +3:1025,1026c + // no edge_based parameter -> use edge-based (because its there) + assertMoscowEdgeBased(tmpHopper, "none", true); +====1 +1:1053a +2:1054,1104c +3:1054,1104c + @Test + public void testNodeBasedCHOnlyButTurnCostForNonCH() { + // before edge-based CH was added a common case was to use edge-based without CH and CH for node-based + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). + setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator() + .setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.OFF) + .setDisablingAllowed(true); + tmpHopper.importOrLoad(); + + // without CH -> use edge-based unless disabled explicitly + assertMoscowEdgeBased(tmpHopper, "none", false); + assertMoscowEdgeBased(tmpHopper, "true", false); + assertMoscowNodeBased(tmpHopper, "false", false); + + // with CH -> use node-based unless edge_based is enabled explicitly (which should give an error) + assertMoscowNodeBased(tmpHopper, "none", true); + assertMoscowNodeBased(tmpHopper, "false", true); + GHResponse rsp = runMoscow(tmpHopper, "true", true); + assertEquals(1, rsp.getErrors().size()); + assertTrue(rsp.getErrors().toString().contains("Found a node-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested edge-based CH")); + } + + @Test + public void testEdgeBasedByDefaultIfOnlyEdgeBased() { + // when there is only one edge-based CH profile, there is no need to specify edge_based=true explicitly, + // see #1637 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setStoreOnFlush(true). + setCHEnabled(true). + setGraphHopperLocation(tmpGraphFile). 
+ setEncodingManager(EncodingManager.create("car|turn_costs=true")); + tmpHopper.getCHFactoryDecorator().setDisablingAllowed(true); + tmpHopper.getCHFactoryDecorator().setEdgeBasedCHMode(CHAlgoFactoryDecorator.EdgeBasedCHMode.EDGE_OR_NODE); + tmpHopper.importOrLoad(); + + // even when we omit the edge_based parameter we get edge-based CH, unless we disable it explicitly + assertMoscowEdgeBased(tmpHopper, "none", true); + assertMoscowEdgeBased(tmpHopper, "true", true); + GHResponse rsp = runMoscow(tmpHopper, "false", true); + assertTrue(rsp.hasErrors()); + assertTrue(rsp.getErrors().toString().contains("Found 1 edge-based CH preparation")); + assertTrue(rsp.getErrors().toString().contains("but requested node-based CH")); + } + +====1 +1:1055c + return assertMoscow(tmpHopper, edgeBasedParam, false, ch); +2:1106,1108c +3:1106,1108c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(400, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1059c + return assertMoscow(tmpHopper, edgeBasedParam, true, ch); +2:1112,1114c +3:1112,1114c + GHResponse rsp = runMoscow(tmpHopper, edgeBasedParam, ch); + assertEquals(1044, rsp.getBest().getDistance(), 1); + return rsp; +====1 +1:1062c + private GHResponse assertMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean withTurnCosts, boolean ch) { +2:1117c +3:1117c + private GHResponse runMoscow(GraphHopper tmpHopper, String edgeBasedParam, boolean ch) { +====1 +1:1070,1072c + GHResponse rsp = tmpHopper.route(req); + assertEquals(withTurnCosts ? 1044 : 400, rsp.getBest().getDistance(), 1); + return rsp; +2:1125c +3:1125c + return tmpHopper.route(req); +====1 +1:1086a +2:1140,1156c +3:1140,1156c + @Test + public void testEncoderWithTurnCostSupport_stillAllows_nodeBasedRouting() { + // see #1698 + GraphHopper tmpHopper = new GraphHopperOSM(). + setOSMFile(DIR + "/moscow.osm.gz"). + setGraphHopperLocation(tmpGraphFile). + setCHEnabled(false). 
+ setEncodingManager(EncodingManager.create("foot,car|turn_costs=true")); + tmpHopper.importOrLoad(); + GHPoint p = new GHPoint(55.813357, 37.5958585); + GHPoint q = new GHPoint(55.811042, 37.594689); + GHRequest req = new GHRequest(p, q); + req.setVehicle("foot"); + GHResponse rsp = tmpHopper.route(req); + assertEquals("there should not be an error, but was: " + rsp.getErrors(), 0, rsp.getErrors().size()); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1177/spork/diff_changelog.txt.txt b/src/python/merge_conflict_analysis_diffs/1177/spork/diff_changelog.txt.txt new file mode 100644 index 0000000000..fa8ed01105 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1177/spork/diff_changelog.txt.txt @@ -0,0 +1,10 @@ +==== +1:18a +2:19,20c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + It is now possible to specify finite u-turn costs for CH preparation, #1671 +3:19,22c + removed the 'traversal_mode` request parameter for /route, instead of 'traversal_mode=edge_based_2dir' use edge_based=true + removed GraphHopper.set/getTraversalMode() methods, #1705 + edge-based CH is now chosen by default if it was prepared, #1706 + it is now possible to specify finite u-turn costs for CH preparation, #1671 diff --git a/src/python/merge_conflict_analysis_diffs/128/git_hires_merge/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/git_hires_merge/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..52d80407b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/git_hires_merge/diff_ApiCommands.java.txt @@ -0,0 +1,1481 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) 
domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. 
+ if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = 
WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == 
null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1102c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1107c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1129,1130c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1176c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + 
domainsManager.easyAddOrReplaceDom(domObj); +2:1259c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1278c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1283c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1295,1296c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1298,1299c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1307a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1393c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1404c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1424c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1432c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1441c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1451c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1459c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1470c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1475c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1487c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1500c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1549c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1627,1628c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) 
{ + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..6d144cc367 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort/diff_ApiCommands.java.txt @@ -0,0 +1,1549 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, 
serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. + if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", 
serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + 
virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! 
total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new 
CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? "onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." 
+ url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", 
NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, 
Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", 
NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + 
Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, 
type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if 
(entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====2 +1:1902a +3:1098a +2:1099,1129c + Map> domMap = domainsManager.getAllDomNames(); + JSONObject result = new JSONObject(); + // For old DNS-F client: + String dnsfVersion = "1.0.1"; + String agent = request.getHeader("Client-Version"); + ClientInfo clientInfo = new ClientInfo(agent); + if (clientInfo.type == ClientInfo.ClientType.DNS && clientInfo.version.compareTo(VersionUtil.parseVersion(dnsfVersion)) <= 0) { + + List doms = new ArrayList(); + Set domSet = null; + + if (domMap.containsKey(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID)) { + domSet = domMap.get(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID); + } + + if (CollectionUtils.isEmpty(domSet)) { + result.put("doms", new HashSet<>()); + result.put("count", 0); + return result; + } + + for (String dom : domSet) { + if (DistroMapper.responsible(dom) || !responsibleOnly) { + doms.add(dom); + } + } + + result.put("doms", doms); + result.put("count", doms.size()); + return result; + } +====2 +1:1904a +3:1100a +2:1132c + <<<<<<< HEAD +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1134c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====2 +1:1907a +3:1103a +2:1136,1142c + ||||||| c863cbcde + + Map> domMap = domainsManager.getAllDomNames(); + + ======= + int count = 0; + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1146c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====2 +1:1914a +3:1110a +2:1150c + count += 
doms.get(namespaceId).size(); +====2 +1:1917,1918c +3:1113,1114c + JSONObject result = new JSONObject(); + +2:1152a +====2 +1:1920c +3:1116c + result.put("count", doms.size()); +2:1154c + result.put("count", count); +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1167,1168c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1214c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1297c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1316c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1321c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1333,1334c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1336,1337c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1345a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1431c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1442c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1462c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1470c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1479c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1489c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1497c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1508c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1513c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); 
+2:1525c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1538c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1587c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1665,1666c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_adjacent/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_adjacent/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..52d80407b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_adjacent/diff_ApiCommands.java.txt @@ -0,0 +1,1481 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + 
String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. + if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + 
ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! 
total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new 
CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? "onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." 
+ url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", 
NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, 
Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", 
NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + 
Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, 
type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if 
(entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1102c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1107c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1129,1130c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1176c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1259c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1278c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1283c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1295,1296c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1298,1299c +3:1298,1299c + int 
responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1307a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1393c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1404c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1424c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1432c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1441c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1451c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1459c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1470c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1475c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1487c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1500c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1549c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1627,1628c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_ignorespace/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_ignorespace/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..6d144cc367 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_ignorespace/diff_ApiCommands.java.txt @@ -0,0 +1,1549 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import 
com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), 
clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. + if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + 
domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + 
stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final 
String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====2 +1:1902a +3:1098a +2:1099,1129c + Map> domMap = domainsManager.getAllDomNames(); + JSONObject result = new JSONObject(); + // For old DNS-F client: + String dnsfVersion = "1.0.1"; + String agent = request.getHeader("Client-Version"); + ClientInfo clientInfo = new ClientInfo(agent); + if (clientInfo.type == ClientInfo.ClientType.DNS && clientInfo.version.compareTo(VersionUtil.parseVersion(dnsfVersion)) <= 0) { + + List doms = new ArrayList(); + Set domSet = null; + + if (domMap.containsKey(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID)) { + domSet = domMap.get(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID); + } + + if (CollectionUtils.isEmpty(domSet)) { + result.put("doms", new HashSet<>()); + result.put("count", 0); + return result; + } 
+ + for (String dom : domSet) { + if (DistroMapper.responsible(dom) || !responsibleOnly) { + doms.add(dom); + } + } + + result.put("doms", doms); + result.put("count", doms.size()); + return result; + } +====2 +1:1904a +3:1100a +2:1132c + <<<<<<< HEAD +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1134c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====2 +1:1907a +3:1103a +2:1136,1142c + ||||||| c863cbcde + + Map> domMap = domainsManager.getAllDomNames(); + + ======= + int count = 0; + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1146c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====2 +1:1914a +3:1110a +2:1150c + count += doms.get(namespaceId).size(); +====2 +1:1917,1918c +3:1113,1114c + JSONObject result = new JSONObject(); + +2:1152a +====2 +1:1920c +3:1116c + result.put("count", doms.size()); +2:1154c + result.put("count", count); +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1167,1168c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1214c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1297c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1316c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1321c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1333,1334c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1336,1337c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1345a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1431c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1442c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1462c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1470c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1479c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1489c +3:1451c + 
VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1497c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1508c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1513c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1525c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1538c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1587c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1665,1666c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_imports/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_imports/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..52d80407b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_imports/diff_ApiCommands.java.txt @@ -0,0 +1,1481 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) 
domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. 
+ if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = 
WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == 
null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1102c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1107c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1129,1130c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1176c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + 
domainsManager.easyAddOrReplaceDom(domObj); +2:1259c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1278c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1283c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1295,1296c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1298,1299c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1307a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1393c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1404c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1424c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1432c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1441c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1451c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1459c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1470c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1475c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1487c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1500c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1549c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1627,1628c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) 
{ + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_imports_ignorespace/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_imports_ignorespace/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..52d80407b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_ort_imports_ignorespace/diff_ApiCommands.java.txt @@ -0,0 +1,1481 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full 
arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. + if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + 
Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + 
condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! 
total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new 
CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? "onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." 
+ url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", 
NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, 
Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", 
NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + 
Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, 
type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if 
(entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1102c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1107c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1129,1130c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1176c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1259c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1278c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1283c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1295,1296c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1298,1299c +3:1298,1299c + int 
responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1307a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1393c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1404c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1424c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1432c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1441c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1451c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1459c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1470c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1475c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1487c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1500c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1549c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1627,1628c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_histogram/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_histogram/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..6d144cc367 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_histogram/diff_ApiCommands.java.txt @@ -0,0 +1,1549 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import 
com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), 
clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. + if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + 
domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + 
stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final 
String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====2 +1:1902a +3:1098a +2:1099,1129c + Map> domMap = domainsManager.getAllDomNames(); + JSONObject result = new JSONObject(); + // For old DNS-F client: + String dnsfVersion = "1.0.1"; + String agent = request.getHeader("Client-Version"); + ClientInfo clientInfo = new ClientInfo(agent); + if (clientInfo.type == ClientInfo.ClientType.DNS && clientInfo.version.compareTo(VersionUtil.parseVersion(dnsfVersion)) <= 0) { + + List doms = new ArrayList(); + Set domSet = null; + + if (domMap.containsKey(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID)) { + domSet = domMap.get(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID); + } + + if (CollectionUtils.isEmpty(domSet)) { + result.put("doms", new HashSet<>()); + result.put("count", 0); + return result; + } 
+ + for (String dom : domSet) { + if (DistroMapper.responsible(dom) || !responsibleOnly) { + doms.add(dom); + } + } + + result.put("doms", doms); + result.put("count", doms.size()); + return result; + } +====2 +1:1904a +3:1100a +2:1132c + <<<<<<< HEAD +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1134c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====2 +1:1907a +3:1103a +2:1136,1142c + ||||||| c863cbcde + + Map> domMap = domainsManager.getAllDomNames(); + + ======= + int count = 0; + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1146c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====2 +1:1914a +3:1110a +2:1150c + count += doms.get(namespaceId).size(); +====2 +1:1917,1918c +3:1113,1114c + JSONObject result = new JSONObject(); + +2:1152a +====2 +1:1920c +3:1116c + result.put("count", doms.size()); +2:1154c + result.put("count", count); +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1167,1168c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1214c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1297c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1316c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1321c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1333,1334c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1336,1337c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1345a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1431c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1442c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1462c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1470c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1479c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1489c +3:1451c + 
VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1497c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1508c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1513c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1525c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1538c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1587c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1665,1666c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_ignorespace/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_ignorespace/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..8a8c1cbcc2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_ignorespace/diff_ApiCommands.java.txt @@ -0,0 +1,1548 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = 
(VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. 
+ if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = 
WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == 
null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====2 +1:1902a +3:1098a +2:1099,1105c + Map> domMap = domainsManager.getAllDomNames(); + JSONObject result = new JSONObject(); + // For old DNS-F client: + String dnsfVersion = "1.0.1"; + String agent = request.getHeader("Client-Version"); + ClientInfo clientInfo = new ClientInfo(agent); + if (clientInfo.type == ClientInfo.ClientType.DNS && clientInfo.version.compareTo(VersionUtil.parseVersion(dnsfVersion)) <= 0) { +====2 +1:1904c +3:1100c + Map> doms = new HashMap<>(16); +2:1107,1108c + List doms = new ArrayList(); + Set domSet = null; +==== +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1110,1135c + <<<<<<< HEAD + Map> domMap = serviceManager.getAllDomNames(); + ||||||| c863cbcde + Map> domMap = 
domainsManager.getAllDomNames(); + ======= + if (domMap.containsKey(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID)) { + domSet = domMap.get(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID); + } + + if (CollectionUtils.isEmpty(domSet)) { + result.put("doms", new HashSet<>()); + result.put("count", 0); + return result; + } + >>>>>>> TEMP_RIGHT_BRANCH + + for (String dom : domSet) { + if (DistroMapper.responsible(dom) || !responsibleOnly) { + doms.add(dom); + } + } + + result.put("doms", doms); + result.put("count", doms.size()); + return result; + } +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====2 +1:1907a +3:1103a +2:1137,1138c + Map> doms = new HashMap<>(16); + int count = 0; +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1142c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====2 +1:1914a +3:1110a +2:1146c + count += doms.get(namespaceId).size(); +====2 +1:1917,1918c +3:1113,1114c + JSONObject result = new JSONObject(); + +2:1148a +====2 +1:1920c +3:1116c + result.put("count", doms.size()); +2:1150c + result.put("count", count); +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1163,1164c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1210c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1293c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1312c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1317c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1329,1330c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1332,1333c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1341a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1427c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1438c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1458c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1466c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1475c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + 
VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1485c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1493c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1504c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1509c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1521c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1534c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1583c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1661,1662c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_minimal/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_minimal/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..46e350c7ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_minimal/diff_ApiCommands.java.txt @@ -0,0 +1,1548 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", 
domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. 
+ if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = 
WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == 
null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====2 +1:1902a +3:1098a +2:1099,1105c + Map> domMap = domainsManager.getAllDomNames(); + JSONObject result = new JSONObject(); + // For old DNS-F client: + String dnsfVersion = "1.0.1"; + String agent = request.getHeader("Client-Version"); + ClientInfo clientInfo = new ClientInfo(agent); + if (clientInfo.type == ClientInfo.ClientType.DNS && clientInfo.version.compareTo(VersionUtil.parseVersion(dnsfVersion)) <= 0) { +====2 +1:1904c +3:1100c + Map> doms = new HashMap<>(16); +2:1107,1108c + List doms = new ArrayList(); + Set domSet = null; +==== +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1110,1135c + <<<<<<< HEAD + Map> domMap = serviceManager.getAllDomNames(); + ||||||| c863cbcde + Map> domMap = 
domainsManager.getAllDomNames(); + ======= + if (domMap.containsKey(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID)) { + domSet = domMap.get(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID); + } + >>>>>>> TEMP_RIGHT_BRANCH + + if (CollectionUtils.isEmpty(domSet)) { + result.put("doms", new HashSet<>()); + result.put("count", 0); + return result; + } + + for (String dom : domSet) { + if (DistroMapper.responsible(dom) || !responsibleOnly) { + doms.add(dom); + } + } + + result.put("doms", doms); + result.put("count", doms.size()); + return result; + } +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====2 +1:1907a +3:1103a +2:1137,1138c + Map> doms = new HashMap<>(16); + int count = 0; +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1142c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====2 +1:1914a +3:1110a +2:1146c + count += doms.get(namespaceId).size(); +====2 +1:1917,1918c +3:1113,1114c + JSONObject result = new JSONObject(); + +2:1148a +====2 +1:1920c +3:1116c + result.put("count", doms.size()); +2:1150c + result.put("count", count); +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1163,1164c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1210c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1293c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1312c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1317c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1329,1330c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1332,1333c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1341a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1427c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1438c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1458c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1466c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1475c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + 
VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1485c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1493c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1504c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1509c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1521c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1534c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1583c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1661,1662c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_myers/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_myers/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..46e350c7ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_myers/diff_ApiCommands.java.txt @@ -0,0 +1,1548 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", 
domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. 
+ if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = 
WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == 
null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====2 +1:1902a +3:1098a +2:1099,1105c + Map> domMap = domainsManager.getAllDomNames(); + JSONObject result = new JSONObject(); + // For old DNS-F client: + String dnsfVersion = "1.0.1"; + String agent = request.getHeader("Client-Version"); + ClientInfo clientInfo = new ClientInfo(agent); + if (clientInfo.type == ClientInfo.ClientType.DNS && clientInfo.version.compareTo(VersionUtil.parseVersion(dnsfVersion)) <= 0) { +====2 +1:1904c +3:1100c + Map> doms = new HashMap<>(16); +2:1107,1108c + List doms = new ArrayList(); + Set domSet = null; +==== +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1110,1135c + <<<<<<< HEAD + Map> domMap = serviceManager.getAllDomNames(); + ||||||| c863cbcde + Map> domMap = 
domainsManager.getAllDomNames(); + ======= + if (domMap.containsKey(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID)) { + domSet = domMap.get(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID); + } + >>>>>>> TEMP_RIGHT_BRANCH + + if (CollectionUtils.isEmpty(domSet)) { + result.put("doms", new HashSet<>()); + result.put("count", 0); + return result; + } + + for (String dom : domSet) { + if (DistroMapper.responsible(dom) || !responsibleOnly) { + doms.add(dom); + } + } + + result.put("doms", doms); + result.put("count", doms.size()); + return result; + } +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====2 +1:1907a +3:1103a +2:1137,1138c + Map> doms = new HashMap<>(16); + int count = 0; +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1142c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====2 +1:1914a +3:1110a +2:1146c + count += doms.get(namespaceId).size(); +====2 +1:1917,1918c +3:1113,1114c + JSONObject result = new JSONObject(); + +2:1148a +====2 +1:1920c +3:1116c + result.put("count", doms.size()); +2:1150c + result.put("count", count); +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1163,1164c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1210c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1293c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1312c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1317c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1329,1330c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1332,1333c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1341a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1427c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1438c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1458c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1466c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1475c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + 
VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1485c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1493c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1504c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1509c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1521c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1534c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1583c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1661,1662c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_patience/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_patience/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..6d144cc367 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/gitmerge_recursive_patience/diff_ApiCommands.java.txt @@ -0,0 +1,1549 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", 
domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. 
+ if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = 
WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == 
null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====2 +1:1902a +3:1098a +2:1099,1129c + Map> domMap = domainsManager.getAllDomNames(); + JSONObject result = new JSONObject(); + // For old DNS-F client: + String dnsfVersion = "1.0.1"; + String agent = request.getHeader("Client-Version"); + ClientInfo clientInfo = new ClientInfo(agent); + if (clientInfo.type == ClientInfo.ClientType.DNS && clientInfo.version.compareTo(VersionUtil.parseVersion(dnsfVersion)) <= 0) { + + List doms = new ArrayList(); + Set domSet = null; + + if (domMap.containsKey(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID)) { + domSet = domMap.get(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID); + } + + if (CollectionUtils.isEmpty(domSet)) { + result.put("doms", new HashSet<>()); + result.put("count", 0); + return result; + } 
+ + for (String dom : domSet) { + if (DistroMapper.responsible(dom) || !responsibleOnly) { + doms.add(dom); + } + } + + result.put("doms", doms); + result.put("count", doms.size()); + return result; + } +====2 +1:1904a +3:1100a +2:1132c + <<<<<<< HEAD +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1134c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====2 +1:1907a +3:1103a +2:1136,1142c + ||||||| c863cbcde + + Map> domMap = domainsManager.getAllDomNames(); + + ======= + int count = 0; + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1146c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====2 +1:1914a +3:1110a +2:1150c + count += doms.get(namespaceId).size(); +====2 +1:1917,1918c +3:1113,1114c + JSONObject result = new JSONObject(); + +2:1152a +====2 +1:1920c +3:1116c + result.put("count", doms.size()); +2:1154c + result.put("count", count); +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1167,1168c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1214c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1297c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1316c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1321c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1333,1334c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1336,1337c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1345a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1431c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1442c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1462c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1470c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1479c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1489c +3:1451c + 
VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1497c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1508c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1513c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1525c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1538c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1587c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1665,1666c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/intellimerge/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/intellimerge/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..6d144cc367 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/intellimerge/diff_ApiCommands.java.txt @@ -0,0 +1,1549 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, 
dom); +2:154c +3:154c + = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. 
+ if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = 
WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == 
null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====2 +1:1902a +3:1098a +2:1099,1129c + Map> domMap = domainsManager.getAllDomNames(); + JSONObject result = new JSONObject(); + // For old DNS-F client: + String dnsfVersion = "1.0.1"; + String agent = request.getHeader("Client-Version"); + ClientInfo clientInfo = new ClientInfo(agent); + if (clientInfo.type == ClientInfo.ClientType.DNS && clientInfo.version.compareTo(VersionUtil.parseVersion(dnsfVersion)) <= 0) { + + List doms = new ArrayList(); + Set domSet = null; + + if (domMap.containsKey(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID)) { + domSet = domMap.get(Constants.REQUEST_PARAM_DEFAULT_NAMESPACE_ID); + } + + if (CollectionUtils.isEmpty(domSet)) { + result.put("doms", new HashSet<>()); + result.put("count", 0); + return result; + } 
+ + for (String dom : domSet) { + if (DistroMapper.responsible(dom) || !responsibleOnly) { + doms.add(dom); + } + } + + result.put("doms", doms); + result.put("count", doms.size()); + return result; + } +====2 +1:1904a +3:1100a +2:1132c + <<<<<<< HEAD +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1134c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====2 +1:1907a +3:1103a +2:1136,1142c + ||||||| c863cbcde + + Map> domMap = domainsManager.getAllDomNames(); + + ======= + int count = 0; + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1146c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====2 +1:1914a +3:1110a +2:1150c + count += doms.get(namespaceId).size(); +====2 +1:1917,1918c +3:1113,1114c + JSONObject result = new JSONObject(); + +2:1152a +====2 +1:1920c +3:1116c + result.put("count", doms.size()); +2:1154c + result.put("count", count); +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1167,1168c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1214c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + domainsManager.easyAddOrReplaceDom(domObj); +2:1297c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1316c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1321c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1333,1334c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1336,1337c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1345a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1431c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1442c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1462c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1470c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1479c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1489c +3:1451c + 
VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1497c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1508c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1513c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1525c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1538c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1587c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1665,1666c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) { + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/128/spork/diff_ApiCommands.java.txt b/src/python/merge_conflict_analysis_diffs/128/spork/diff_ApiCommands.java.txt new file mode 100644 index 0000000000..52d80407b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/128/spork/diff_ApiCommands.java.txt @@ -0,0 +1,1481 @@ +====1 +1:27a +2:28c +3:28c + import com.alibaba.nacos.naming.cluster.ServerListManager; +====1 +1:35,42c + import com.alibaba.nacos.naming.raft.Datum; + import com.alibaba.nacos.naming.raft.RaftCore; + import com.alibaba.nacos.naming.raft.RaftPeer; + import com.alibaba.nacos.naming.raft.RaftProxy; + import com.ning.http.client.AsyncCompletionHandler; + import com.ning.http.client.Response; + import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + import org.apache.catalina.util.ParameterMap; +2:35a +3:35a +====1 +1:65,69c + import java.util.concurrent.CountDownLatch; + import java.util.concurrent.TimeUnit; + import java.util.concurrent.locks.Condition; + import java.util.concurrent.locks.Lock; + import java.util.concurrent.locks.ReentrantLock; +2:57a +3:57a +====1 +1:84c + protected DomainsManager domainsManager; +2:72,87c +3:72,87c + protected ServiceManager serviceManager; + + @Autowired + private SwitchManager switchManager; + + @Autowired + private ServerListManager serverListManager; + + @Autowired + private SwitchDomain switchDomain; + + @Autowired + private PushService pushService; + + @Autowired + private DistroMapper distroMapper; +====1 +1:112c + result.put("cacheMillis", Switch.getPushCacheMillis(client.getDom())); +2:115c +3:115c + result.put("cacheMillis", switchDomain.getPushCacheMillis(client.getDom())); +====1 +1:126c + Domain dom = domainsManager.getDomain(namespaceId, name); +2:129c +3:129c + Domain dom = serviceManager.getService(namespaceId, name); +====1 +1:138c + result.put("count", domainsManager.getDomCount()); +2:141c +3:141c + result.put("count", serviceManager.getDomCount()); +====1 +1:151c + = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:154c +3:154c 
+ = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:182c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:185c +3:185c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:212c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, domName); +2:215c +3:215c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, domName); +====1 +1:273c + if (domainsManager.getDomain(namespaceId, dom) != null) { +2:276c +3:276c + if (serviceManager.getService(namespaceId, dom) != null) { +====1 +1:292,294c + String dom = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(dom)) { + dom = WebUtils.required(request, "dom"); +2:295,297c +3:295,297c + String serviceName = WebUtils.optional(request, "serviceName", StringUtils.EMPTY); + if (StringUtils.isBlank(serviceName)) { + serviceName = WebUtils.required(request, "dom"); +====1 +1:296,297c + String app; + app = WebUtils.optional(request, "app", StringUtils.EMPTY); +2:299c +3:299c + +====1 +1:305c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, dom); +2:307c +3:307c + Loggers.DEBUG_LOG.debug("[CLIENT-BEAT] full arguments: beat: {}, serviceName: {}", clientBeat, serviceName); +====1 +1:308,314c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + Map stringMap = new HashMap<>(16); + stringMap.put(Constants.REQUEST_PARAM_SERVICE_NAME, Arrays.asList(dom).toArray(new String[1])); + stringMap.put("enableClientBeat", Arrays.asList("true").toArray(new String[1])); + stringMap.put("cktype", Arrays.asList("TCP").toArray(new String[1])); + stringMap.put("appName", Arrays.asList(app).toArray(new String[1])); + stringMap.put("clusterName", Arrays.asList(clusterName).toArray(new String[1])); +2:310,311c +3:310,311c + IpAddress ipAddress = serviceManager.getInstance(namespaceId, serviceName, clientBeat.getCluster(), clientBeat.getIp(), + clientBeat.getPort()); +====1 +1:316,319c + //if domain does not exist, register it. 
+ if (virtualClusterDomain == null) { + regDom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("dom not found, register it, dom: {}", dom); +2:313,322c +3:313,322c + if (ipAddress == null) { + ipAddress = new IpAddress(); + ipAddress.setPort(clientBeat.getPort()); + ipAddress.setIp(clientBeat.getIp()); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(serviceName); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:322,325c + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + String ip = clientBeat.getIp(); + int port = clientBeat.getPort(); +2:325c +3:325c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:327,354c + IpAddress ipAddress = new IpAddress(); + ipAddress.setPort(port); + ipAddress.setIp(ip); + ipAddress.setWeight(clientBeat.getWeight()); + ipAddress.setMetadata(clientBeat.getMetadata()); + ipAddress.setClusterName(clusterName); + ipAddress.setServiceName(dom); + ipAddress.setInstanceId(ipAddress.generateInstanceId()); + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } + + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", Switch.getClientBeatInterval()); + + if (!virtualClusterDomain.allIPs().contains(ipAddress)) { + + if (!virtualClusterDomain.getEnableClientBeat()) { + return result; + } + + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + Loggers.SRV_LOG.warn("ip not found, register it, dom: {}, ip: {}", dom, ipAddress); +2:327,328c +3:327,328c + if (virtualClusterDomain == null) { + throw new NacosException(NacosException.SERVER_ERROR, "service not found: " + serviceName + "@" + namespaceId); +====1 +1:357,359c + if (!DistroMapper.responsible(dom)) { + String server = DistroMapper.mapSrv(dom); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", dom, server); +2:331,333c +3:331,333c + if (!distroMapper.responsible(serviceName)) { + String server = distroMapper.mapSrv(serviceName); + Loggers.EVT_LOG.info("I'm not responsible for {}, proxy it to {}", serviceName, server); +====1 +1:381a +2:356,359c +3:356,359c + JSONObject result = new JSONObject(); + + result.put("clientBeatInterval", switchDomain.getClientBeatInterval()); + +====1 +1:385c + +2:362a +3:362a +====1 +1:400c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +2:377c +3:377c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.server.name())))); +====1 +1:406c + String.valueOf(Switch.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +2:383c +3:383c + String.valueOf(switchDomain.getDefaultHealthCheckMode().equals(HealthCheckMode.client.name())))); +====1 +1:503c + domainsManager.easyAddOrReplaceDom(domObj); +2:480c +3:480c + serviceManager.addOrReplaceService(domObj); +====1 +1:534c + String dom = 
WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +2:511c +3:511c + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); +====1 +1:536c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:513c +3:513c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, serviceName); +====1 +1:541,547c + ParameterMap parameterMap = new ParameterMap<>(); + parameterMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + parameterMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + parameterMap.put("json", Arrays.asList("true").toArray(new String[1])); + parameterMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + return remvIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, parameterMap)); +2:518c +3:518c + serviceManager.removeInstance(namespaceId, serviceName, ipAddress); +====1 +1:548a +2:520c +3:520c + return "ok"; +====1 +1:551c + @SuppressFBWarnings("JLM_JSR166_LOCK_MONITORENTER") +2:522a +3:522a +====1 +1:555,556c + String dom = WebUtils.required(request, "serviceName"); + String tenant = WebUtils.optional(request, "tid", StringUtils.EMPTY); +2:526,527c +3:526,527c + String serviceName = WebUtils.required(request, "serviceName"); + String clusterName = WebUtils.required(request, "clusterName"); +====1 +1:558c + String env = WebUtils.optional(request, "env", StringUtils.EMPTY); +2:528a +3:528a +====1 +1:560,562c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, UtilsAndCommons.getDefaultNamespaceId()); + + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:530,531c +3:530,531c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +====1 +1:566c + ipAddress.setServiceName(dom); +2:535c +3:535c + ipAddress.setServiceName(serviceName); +====1 +1:573,616c + if (virtualClusterDomain == null) { + + Lock lock = domainsManager.addLockIfAbsent(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + Condition condition = domainsManager.addCondtion(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)); + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + try { + regDom(request); + } catch (Exception e) { + Loggers.SRV_LOG.error("[REG-SERIVCE] register service failed, service:" + dom, e); + } + } + }); + try { + lock.lock(); + condition.await(5000, TimeUnit.MILLISECONDS); + } finally { + lock.unlock(); + } + + virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + } + + if (virtualClusterDomain != null) { + + if (!virtualClusterDomain.getClusterMap().containsKey(ipAddress.getClusterName())) { + doAddCluster4Dom(request); + } + + if (Loggers.SRV_LOG.isDebugEnabled()) { + Loggers.SRV_LOG.debug("reg-service add ip: {}|{}", dom, ipAddress.toJSON()); + } + + Map stringMap = new HashMap<>(16); + stringMap.put("dom", Arrays.asList(dom).toArray(new String[1])); + stringMap.put("ipList", Arrays.asList(JSON.toJSONString(Arrays.asList(ipAddress))).toArray(new String[1])); + stringMap.put("json", Arrays.asList("true").toArray(new String[1])); + stringMap.put("token", Arrays.asList(virtualClusterDomain.getToken()).toArray(new String[1])); + + 
addIP4Dom(OverrideParameterRequestWrapper.buildRequest(request, stringMap)); + } else { + throw new IllegalArgumentException("dom not found: " + dom); + } +2:542c +3:542c + serviceManager.registerInstance(namespaceId, serviceName, clusterName, ipAddress); +====1 +1:621c + +2:546a +3:546a +====1 +1:628c + VirtualClusterDomain dom = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, name); +2:553c +3:553c + VirtualClusterDomain dom = (VirtualClusterDomain) serviceManager.getService(namespaceId, name); +====1 +1:633,637c + RaftPeer leader = RaftCore.getLeader(); + if (leader == null) { + throw new IllegalStateException("not leader at present, cannot update"); + } + +2:557a +3:557a +====1 +1:678c + if (cktype.equals(AbstractHealthCheckProcessor.HTTP_PROCESSOR.getType())) { +2:598c +3:598c + if (cktype.equals(HealthCheckType.HTTP.name().toLowerCase())) { +====1 +1:683c + } else if (cktype.equals(AbstractHealthCheckProcessor.TCP_PROCESSOR.getType())) { +2:603c +3:603c + } else if (cktype.equals(HealthCheckType.TCP.name().toLowerCase())) { +====1 +1:687c + } else if (cktype.equals(AbstractHealthCheckProcessor.MYSQL_PROCESSOR.getType())) { +2:607c +3:607c + } else if (cktype.equals(HealthCheckType.MYSQL.name().toLowerCase())) { +====1 +1:779c + domainsManager.easyAddOrReplaceDom(dom); +2:699c +3:699c + serviceManager.addOrReplaceService(dom); +====1 +1:787c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + domainsManager.getDomCount() +2:707c +3:707c + result.put("msg", "Hello! I am Nacos-Naming and healthy! total services: raft " + serviceManager.getDomCount() +====1 +1:792c + +2:711a +3:711a +====1 +1:800c + if (domainsManager.getDomain(namespaceId, dom) == null) { +2:719c +3:719c + if (serviceManager.getService(namespaceId, dom) == null) { +====1 +1:804c + domainsManager.easyRemoveDom(namespaceId, dom); +2:723c +3:723c + serviceManager.easyRemoveDom(namespaceId, dom); +====1 +1:814c + Map> domMap = domainsManager.getAllDomNames(); +2:733c +3:733c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:818c + Domain domObj = domainsManager.getDomain(namespaceId, dom); +2:737c +3:737c + Domain domObj = serviceManager.getService(namespaceId, dom); +====1 +1:841,979c + @RequestMapping("/onAddIP4Dom") + public String onAddIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer {} tried to publish data but wasn't leader, leader: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: {}, cur-term: {}", + JSON.toJSONString(clientIP), JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term.get()); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + final String dom = WebUtils.required(request, "dom"); + if (domainsManager.getDomain(namespaceId, dom) == 
null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", Boolean.FALSE.toString())); + + String ipListString = WebUtils.required(request, "ipList"); + List newIPs = new ArrayList<>(); + + List ipList; + if (Boolean.parseBoolean(WebUtils.optional(request, SwitchEntry.PARAM_JSON, Boolean.FALSE.toString()))) { + newIPs = JSON.parseObject(ipListString, new TypeReference>() { + }); + } else { + ipList = Arrays.asList(ipListString.split(",")); + for (String ip : ipList) { + IpAddress ipAddr = IpAddress.fromJSON(ip); + newIPs.add(ipAddr); + } + } + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domainsManager.getDomain(namespaceId, dom).allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + domainsManager.easyAddIP4Dom(namespaceId, dom, newIPs, term); + + return "ok"; + } + + private void syncOnUpdateIP4Dom(String namespaceId, String dom, Map proxyParams, String action) throws InterruptedException { + + String key = UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + final CountDownLatch countDownLatch = new CountDownLatch(RaftCore.getPeerSet().majorityCount()); + updateIpPublish(proxyParams, countDownLatch, action); + if (!countDownLatch.await(UtilsAndCommons.MAX_PUBLISH_WAIT_TIME_MILLIS, TimeUnit.MILLISECONDS)) { + Loggers.RAFT.info("data publish failed, key=" + key, ",notify timeout."); + throw new IllegalArgumentException("data publish failed, key=" + key); + } + } + + private void syncOnAddIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void asyncOnAddIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_ADD); + } + + private void syncOnRemvIP4Dom(String namespaceId, String dom, Map proxyParams) throws InterruptedException { + syncOnUpdateIP4Dom(namespaceId, dom, proxyParams, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void asyncOnRemvIP4Dom(Map proxyParams) { + updateIpPublish(proxyParams, null, UtilsAndCommons.UPDATE_INSTANCE_ACTION_REMOVE); + } + + private void updateIpPublish(Map proxyParams, CountDownLatch countDownLatch, String action) { + + for (final String peer : RaftCore.getPeerSet().allServersWithoutMySelf()) { + + UtilsAndCommons.RAFT_PUBLISH_EXECUTOR.execute(new Runnable() { + @Override + public void run() { + + String server = peer; + + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String api = action.equals("remove") ? 
"onRemvIP4Dom" : "onAddIP4Dom"; + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/" + api; + + try { + HttpClient.asyncHttpPost(url, null, proxyParams, new AsyncCompletionHandler() { + @Override + public Integer onCompleted(Response response) throws Exception { + if (response.getStatusCode() != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip params: " + proxyParams + + ",code: " + response.getStatusCode() + ", caused " + response.getResponseBody() + + ", server: " + peer); + return 1; + } + if (countDownLatch != null) { + countDownLatch.countDown(); + } + return 0; + } + }); + } catch (Exception e) { + Loggers.SRV_LOG.error(action + "-IP", "failed when publish to peer." + url, e); + } + } + }); + } + } + +2:759a +3:759a +====1 +1:984c + if (Switch.getDisableAddIP()) { +2:764c +3:764c + if (switchDomain.isDisableAddIP()) { +====1 +1:988,989c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); +2:767a +3:767a +====1 +1:999a +2:778,784c +3:778,784c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String clusterName = WebUtils.required(request, "clusterName"); + +====1 +1:1005c + ipList = Arrays.asList(ipListString); +2:789a +3:789a +====1 +1:1016,1110c + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/addIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to add ip for dom, caused {}", result1.content); + throw new IllegalArgumentException("failed to add ip for dom, caused " + result1.content); + } + + return "ok"; + } + + final String dom = WebUtils.required(request, "dom"); + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + boolean updateOnly = Boolean.parseBoolean(WebUtils.optional(request, "updateOnly", "false")); + + if (CollectionUtils.isEmpty(newIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + if (updateOnly) { + //make sure every IP is in the dom, otherwise refuse update + List oldIPs = domain.allIPs(); + Collection diff = CollectionUtils.subtract(newIPs, oldIPs); + if (diff.size() != 0) { + throw new IllegalArgumentException("these IPs are not present: " + Arrays.toString(diff.toArray()) + + ", if you want to add them, remove updateOnly flag"); + } + } + + String key = UtilsAndCommons.getIPListStoreKey(domain); + + Datum datum = RaftCore.getDatum(key); + if (datum == null) { + try { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).lock(); + datum = RaftCore.getDatum(key); + if (datum == null) { + datum = new Datum(); + 
datum.key = key; + RaftCore.addDatum(datum); + } + } finally { + domainsManager.getDom2LockMap().get(UtilsAndCommons.assembleFullServiceName(namespaceId, dom)).unlock(); + } + } + + long timestamp = RaftCore.getDatum(key).timestamp.get(); + + if (RaftCore.isLeader()) { + try { + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onAddIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnAddIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnAddIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + } +2:800c +3:800c + serviceManager.addInstance(namespaceId, serviceName, clusterName, newIPs.toArray(new IpAddress[newIPs.size()])); +====1 +1:1119c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:809c +3:809c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:1127c + long cacheMillis = Switch.getCacheMillis(dom); +2:817c +3:817c + long cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1131,1132c + if (udpPort > 0 && PushService.canEnablePush(agent)) { + PushService.addClient(namespaceId, dom, +2:821,822c +3:821,822c + if (udpPort > 0 && pushService.canEnablePush(agent)) { + pushService.addClient(namespaceId, dom, +====1 +1:1139c + cacheMillis = Switch.getPushCacheMillis(dom); +2:829c +3:829c + cacheMillis = switchDomain.getPushCacheMillis(dom); +====1 +1:1143c + cacheMillis = Switch.getCacheMillis(dom); +2:833c +3:833c + cacheMillis = switchDomain.getDefaultCacheMillis(); +====1 +1:1233,1274c + @RequestMapping("/onRemvIP4Dom") + public void onRemvIP4Dom(HttpServletRequest request) throws Exception { + if (Switch.getDisableAddIP()) { + throw new AccessControlException("Deleting IP for dom is forbidden now."); + } + + String clientIP = WebUtils.required(request, "clientIP"); + long term = Long.parseLong(WebUtils.required(request, "term")); + + if (!RaftCore.isLeader(clientIP)) { + Loggers.RAFT.warn("peer(" + JSON.toJSONString(clientIP) + ") tried to publish " + + "data but wasn't leader, leader: " + JSON.toJSONString(RaftCore.getLeader())); + throw new IllegalStateException("peer(" + clientIP + ") tried to publish " + + "data but wasn't leader"); + } + + if (term < RaftCore.getPeerSet().local().term.get()) { + Loggers.RAFT.warn("out of date publish, pub-term: " + + JSON.toJSONString(clientIP) + ", cur-term: " + JSON.toJSONString(RaftCore.getPeerSet().local())); + throw new IllegalStateException("out of date publish, pub-term:" + + term + ", cur-term: " + RaftCore.getPeerSet().local().term); + } + + RaftCore.getPeerSet().local().resetLeaderDue(); + + final String dom = WebUtils.required(request, "dom"); + final String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + if (domainsManager.getDomain(namespaceId, dom) == null) { + 
throw new IllegalStateException("dom doesn't exist: " + dom); + } + + List removedIPs = getIpAddresses(request); + + if (CollectionUtils.isEmpty(removedIPs)) { + throw new IllegalArgumentException("Empty ip list"); + } + + domainsManager.easyRemvIP4Dom(namespaceId, dom, removedIPs, term); + } + +2:922a +3:922a +====1 +1:1279,1280c + if (DistroMapper.getLocalhostIP().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + DistroMapper.getLocalhostIP()); +2:927,928c +3:927,928c + if (NetUtils.localServer().equals(UtilsAndCommons.LOCAL_HOST_IP)) { + throw new Exception("invalid localhost ip: " + NetUtils.localServer()); +====1 +1:1308,1314c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + String dom = WebUtils.required(request, "dom"); + String ipListString = WebUtils.required(request, "ipList"); + + if (Loggers.DEBUG_LOG.isDebugEnabled()) { + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: serviceName: {}, iplist: {}", dom, ipListString); +2:956,957c +3:956,957c + if (switchDomain.isDisableAddIP()) { + throw new AccessControlException("Adding IP for dom is forbidden now."); +====1 +1:1323c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments, params: {}", proxyParams); +2:966c +3:966c + Loggers.DEBUG_LOG.debug("[REMOVE-IP] full arguments: {}", proxyParams); +====1 +1:1326c + List ipList = new ArrayList<>(); +2:969,976c +3:969,976c + String namespaceId = WebUtils.optional(request, Constants.REQUEST_PARAM_NAMESPACE_ID, + UtilsAndCommons.getDefaultNamespaceId()); + + String serviceName = WebUtils.required(request, Constants.REQUEST_PARAM_SERVICE_NAME); + + String ipListString = WebUtils.required(request, "ipList"); + final List ipList; + List removedIPs = new ArrayList<>(); +====1 +1:1328c + List ipObjList = new ArrayList<>(ipList.size()); +2:977a +3:977a +====1 +1:1330,1331c + ipList = Arrays.asList(ipListString); + ipObjList = JSON.parseObject(ipListString, new TypeReference>() { +2:979c +3:979c + removedIPs = JSON.parseObject(ipListString, new TypeReference>() { +====1 +1:1336,1359c + ipObjList.add(IpAddress.fromJSON(ip)); + } + } + + if (!RaftCore.isLeader()) { + Loggers.RAFT.info("I'm not leader, will proxy to leader."); + if (RaftCore.getLeader() == null) { + throw new IllegalArgumentException("no leader now."); + } + + RaftPeer leader = RaftCore.getLeader(); + + String server = leader.ip; + if (!server.contains(UtilsAndCommons.CLUSTER_CONF_IP_SPLITER)) { + server = server + UtilsAndCommons.CLUSTER_CONF_IP_SPLITER + RunningConfig.getServerPort(); + } + + String url = "http://" + server + + RunningConfig.getContextPath() + UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/remvIP4Dom"; + HttpClient.HttpResult result1 = HttpClient.httpPost(url, null, proxyParams); + + if (result1.code != HttpURLConnection.HTTP_OK) { + Loggers.SRV_LOG.warn("failed to remove ip for dom, caused: {}", result1.content); + throw new IllegalArgumentException("failed to remove ip for dom, caused " + result1.content); +2:984,985c +3:984,985c + IpAddress ipAddr = IpAddress.fromJSON(ip); + removedIPs.add(ipAddr); +====1 +1:1361,1379c + + return "ok"; + } + + VirtualClusterDomain domain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); + + if (domain == null) { + throw new IllegalStateException("dom doesn't exist: " + dom); + } + + if (CollectionUtils.isEmpty(ipObjList)) { + throw new IllegalArgumentException("Empty ip list"); + } + + String key = 
UtilsAndCommons.getIPListStoreKey(domainsManager.getDomain(namespaceId, dom)); + + long timestamp = 1; + if (RaftCore.getDatum(key) != null) { + timestamp = RaftCore.getDatum(key).timestamp.get(); +2:986a +3:986a +====1 +1:1382,1412c + if (RaftCore.isLeader()) { + + try { + + RaftCore.OPERATE_LOCK.lock(); + + OverrideParameterRequestWrapper requestWrapper = OverrideParameterRequestWrapper.buildRequest(request); + requestWrapper.addParameter("clientIP", NetUtils.localServer()); + requestWrapper.addParameter("notify", "true"); + requestWrapper.addParameter("term", String.valueOf(RaftCore.getPeerSet().local().term)); + requestWrapper.addParameter("timestamp", String.valueOf(timestamp)); + + onRemvIP4Dom(requestWrapper); + + proxyParams.put("clientIP", NetUtils.localServer()); + proxyParams.put("notify", "true"); + proxyParams.put("term", String.valueOf(RaftCore.getPeerSet().local().term)); + proxyParams.put("timestamp", String.valueOf(timestamp)); + + if (domain.getEnableHealthCheck() && !domain.getEnableClientBeat()) { + syncOnRemvIP4Dom(namespaceId, dom, proxyParams); + } else { + asyncOnRemvIP4Dom(proxyParams); + } + } finally { + RaftCore.OPERATE_LOCK.unlock(); + } + + Loggers.EVT_LOG.info("dom: {} {POS} {IP-REMV} new: {} operatorIP: {}", + dom, ipListString, WebUtils.optional(request, "clientIP", "unknown")); + } +2:989c +3:989c + serviceManager.removeInstance(namespaceId, serviceName, removedIPs.toArray(new IpAddress[removedIPs.size()])); +====1 +1:1426,1428c + int failedPushCount = PushService.getFailedPushCount(); + result.put("succeed", PushService.getTotalPush() - failedPushCount); + result.put("total", PushService.getTotalPush()); +2:1003,1005c +3:1003,1005c + int failedPushCount = pushService.getFailedPushCount(); + result.put("succeed", pushService.getTotalPush() - failedPushCount); + result.put("total", pushService.getTotalPush()); +====1 +1:1430,1431c + if (PushService.getTotalPush() > 0) { + result.put("ratio", ((float) PushService.getTotalPush() - failedPushCount) / PushService.getTotalPush()); +2:1007,1008c +3:1007,1008c + if (pushService.getTotalPush() > 0) { + result.put("ratio", ((float) pushService.getTotalPush() - failedPushCount) / pushService.getTotalPush()); +====1 +1:1457,1459c + + ReentrantLock lock = new ReentrantLock(); + +2:1033a +3:1033a +====1 +1:1463a +2:1038,1039c +3:1038,1039c + String entry = WebUtils.required(request, "entry"); + String value = WebUtils.required(request, "value"); +====1 +1:1465,1846c + if (!RaftCore.isLeader() && !debug) { + Map tmpParams = new HashMap<>(16); + for (Map.Entry entry : request.getParameterMap().entrySet()) { + tmpParams.put(entry.getKey(), entry.getValue()[0]); + } + + RaftProxy.proxyGET(UtilsAndCommons.NACOS_NAMING_CONTEXT + "/api/updateSwitch", tmpParams); + return "ok"; + } + + try { + lock.lock(); + String entry = WebUtils.required(request, "entry"); + + Datum datum = RaftCore.getDatum(UtilsAndCommons.DOMAINS_DATA_ID_PRE + UtilsAndCommons.SWITCH_DOMAIN_NAME); + SwitchDomain switchDomain = null; + + if (datum != null) { + switchDomain = JSON.parseObject(datum.value, SwitchDomain.class); + } else { + Loggers.SRV_LOG.warn("datum: {}{} is null", UtilsAndCommons.DOMAINS_DATA_ID_PRE, UtilsAndCommons.SWITCH_DOMAIN_NAME); + } + + if (SwitchEntry.BATCH.equals(entry)) { + //batch update + SwitchDomain dom = JSON.parseObject(WebUtils.required(request, "json"), SwitchDomain.class); + dom.setEnableStandalone(Switch.isEnableStandalone()); + if (dom.httpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN + || 
dom.tcpHealthParams.getMin() < SwitchDomain.HttpHealthParams.MIN_MIN) { + + throw new IllegalArgumentException("min check time for http or tcp is too small(<500)"); + } + + if (dom.httpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX + || dom.tcpHealthParams.getMax() < SwitchDomain.HttpHealthParams.MIN_MAX) { + + throw new IllegalArgumentException("max check time for http or tcp is too small(<3000)"); + } + + if (dom.httpHealthParams.getFactor() < 0 + || dom.httpHealthParams.getFactor() > 1 + || dom.tcpHealthParams.getFactor() < 0 + || dom.tcpHealthParams.getFactor() > 1) { + + throw new IllegalArgumentException("malformed factor"); + } + + Switch.setDom(dom); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (switchDomain != null) { + Switch.setDom(switchDomain); + } + + if (entry.equals(SwitchEntry.DISTRO_THRESHOLD)) { + Float threshold = Float.parseFloat(WebUtils.required(request, "distroThreshold")); + + if (threshold <= 0) { + throw new IllegalArgumentException("distroThreshold can not be zero or negative: " + threshold); + } + + Switch.setDistroThreshold(threshold); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + + if (entry.equals(SwitchEntry.ENABLE_ALL_DOM_NAME_CACHE)) { + Boolean enable = Boolean.parseBoolean(WebUtils.required(request, "enableAllDomNameCache")); + Switch.setAllDomNameCache(enable); + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.INCREMENTAL_LIST)) { + String action = WebUtils.required(request, "action"); + List doms = Arrays.asList(WebUtils.required(request, "incrementalList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getIncrementalList().addAll(doms); + } else if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getIncrementalList().removeAll(doms); + } else { + throw new IllegalArgumentException("action is not allowed: " + action); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_WHITLE_LIST)) { + String action = WebUtils.required(request, "action"); + List whiteList = Arrays.asList(WebUtils.required(request, "healthCheckWhiteList").split(",")); + + if (action.equals(SwitchEntry.ACTION_UPDATE)) { + Switch.getHealthCheckWhiteList().addAll(whiteList); + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + if (action.equals(SwitchEntry.ACTION_DELETE)) { + Switch.getHealthCheckWhiteList().removeAll(whiteList); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.CLIENT_BEAT_INTERVAL)) { + long clientBeatInterval = Long.parseLong(WebUtils.required(request, "clientBeatInterval")); + Switch.setClientBeatInterval(clientBeatInterval); + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setPushJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setPushPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setPushCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_GO, type)) { + Switch.setPushGoVersion(version); + } else { + throw 
new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.TRAFFIC_SCHEDULING_VERSION)) { + String type = WebUtils.required(request, "type"); + String version = WebUtils.required(request, "version"); + + if (!version.matches(UtilsAndCommons.VERSION_STRING_SYNTAX)) { + throw new IllegalArgumentException("illegal version, must match: " + UtilsAndCommons.VERSION_STRING_SYNTAX); + } + + if (StringUtils.equals(SwitchEntry.CLIENT_JAVA, type)) { + Switch.setTrafficSchedulingJavaVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_PYTHON, type)) { + Switch.setTrafficSchedulingPythonVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_C, type)) { + Switch.setTrafficSchedulingCVersion(version); + } else if (StringUtils.equals(SwitchEntry.CLIENT_TENGINE, type)) { + Switch.setTrafficSchedulingTengineVersion(version); + } else { + throw new IllegalArgumentException("unsupported client type: " + type); + } + + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.PUSH_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_PUSH_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min cache time for http or tcp is too small(<10000)"); + } + + Switch.setPushCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + // extremely careful while modifying this, cause it will affect all clients without pushing enabled + if (entry.equals(SwitchEntry.DEFAULT_CACHE_MILLIS)) { + String dom = WebUtils.optional(request, "dom", StringUtils.EMPTY); + Long cacheMillis = Long.parseLong(WebUtils.required(request, "millis")); + + if (cacheMillis < SwitchEntry.MIN_CACHE_TIME_MIILIS) { + throw new IllegalArgumentException("min default cache time is too small(<1000)"); + } + + Switch.setCacheMillis(dom, cacheMillis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.MASTERS)) { + List masters = Arrays.asList(WebUtils.required(request, "names").split(",")); + + Switch.setMasters(masters); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISTRO)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setDistroEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.CHECK)) { + boolean enabled = Boolean.parseBoolean(WebUtils.required(request, "enabled")); + + Switch.setHeathCheckEnabled(enabled); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DEFAULT_HEALTH_CHECK_MODE)) { + String defaultHealthCheckMode = WebUtils.required(request, "mode"); + + Switch.setDefaultHealthCheckMode(defaultHealthCheckMode); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DOM_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, "millis")); + + if (millis < SwitchEntry.MIN_DOM_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("domStatusSynchronizationPeriodMillis is too small(<5000)"); + } + + Switch.setDomStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SERVER_STATUS_SYNC_PERIOD)) { + Long millis = Long.parseLong(WebUtils.required(request, 
"millis")); + + if (millis < SwitchEntry.MIN_SERVER_SYNC_TIME_MIILIS) { + throw new IllegalArgumentException("serverStatusSynchronizationPeriodMillis is too small(<15000)"); + } + + Switch.setServerStatusSynchronizationPeriodMillis(millis); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.HEALTH_CHECK_TIMES)) { + Integer times = Integer.parseInt(WebUtils.required(request, "times")); + + Switch.setCheckTimes(times); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.DISABLE_ADD_IP)) { + boolean disableAddIP = Boolean.parseBoolean(WebUtils.required(request, "disableAddIP")); + + Switch.setDisableAddIP(disableAddIP); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.ENABLE_CACHE)) { + boolean enableCache = Boolean.parseBoolean(WebUtils.required(request, "enableCache")); + + Switch.setEnableCache(enableCache); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.SEND_BEAT_ONLY)) { + boolean sendBeatOnly = Boolean.parseBoolean(WebUtils.required(request, "sendBeatOnly")); + + Switch.setSendBeatOnly(sendBeatOnly); + if (!debug) { + Switch.save(); + } + return "ok"; + } + + if (entry.equals(SwitchEntry.LIMITED_URL_MAP)) { + Map limitedUrlMap = new HashMap<>(16); + String limitedUrls = WebUtils.required(request, "limitedUrls"); + + if (!StringUtils.isEmpty(limitedUrls)) { + String[] entries = limitedUrls.split(","); + for (int i = 0; i < entries.length; i++) { + String[] parts = entries[i].split(":"); + if (parts.length < 2) { + throw new IllegalArgumentException("invalid input for limited urls"); + } + + String limitedUrl = parts[0]; + if (StringUtils.isEmpty(limitedUrl)) { + throw new IllegalArgumentException("url can not be empty, url: " + limitedUrl); + } + + int statusCode = Integer.parseInt(parts[1]); + if (statusCode <= 0) { + throw new IllegalArgumentException("illegal normal status code: " + statusCode); + } + + limitedUrlMap.put(limitedUrl, statusCode); + + } + + Switch.setLimitedUrlMap(limitedUrlMap); + if (!debug) { + Switch.save(); + } + return "ok"; + } + } + + if (entry.equals(SwitchEntry.ENABLE_STANDALONE)) { + String enable = WebUtils.required(request, "enableStandalone"); + + if (!StringUtils.isNotEmpty(enable)) { + Switch.setEnableStandalone(Boolean.parseBoolean(enable)); + } + + if (!debug) { + Switch.save(); + } + + return "ok"; + } + + + throw new IllegalArgumentException("update entry not found: " + entry); + } finally { + lock.unlock(); + } + +2:1041c +3:1041c + switchManager.update(entry, value, debug); +====1 +1:1847a +2:1043c +3:1043c + return "ok"; +====1 +1:1858c + return JSON.parseObject(Switch.getDom().toJSON()); +2:1054c +3:1054c + return JSON.parseObject(switchDomain.toJSON()); +====1 +1:1906c + Map> domMap = domainsManager.getAllDomNames(); +2:1102c +3:1102c + Map> domMap = serviceManager.getAllDomNames(); +====1 +1:1911c + if (DistroMapper.responsible(dom) || !responsibleOnly) { +2:1107c +3:1107c + if (distroMapper.responsible(dom) || !responsibleOnly) { +====1 +1:1933,1934c + List doms + = domainsManager.searchDomains(namespaceId, ".*" + expr + ".*"); +2:1129,1130c +3:1129,1130c + List doms + = serviceManager.searchDomains(namespaceId, ".*" + expr + ".*"); +====1 +1:1980c + VirtualClusterDomain domObj = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1176c +3:1176c + VirtualClusterDomain domObj = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2063c + 
domainsManager.easyAddOrReplaceDom(domObj); +2:1259c +3:1259c + serviceManager.addOrReplaceService(domObj); +====1 +1:2082c + result.put("status", DistroMapper.getDistroConfig()); +2:1278c +3:1278c + result.put("status", distroMapper.getDistroConfig()); +====1 +1:2087c + DistroMapper.clean(); +2:1283c +3:1283c + distroMapper.clean(); +====1 +1:2099,2100c + int domCount = domainsManager.getDomCount(); + int ipCount = domainsManager.getInstanceCount(); +2:1295,1296c +3:1295,1296c + int domCount = serviceManager.getDomCount(); + int ipCount = serviceManager.getInstanceCount(); +====1 +1:2102,2103c + int responsibleDomCount = domainsManager.getResponsibleDomCount(); + int responsibleIPCount = domainsManager.getResponsibleIPCount(); +2:1298,1299c +3:1298,1299c + int responsibleDomCount = serviceManager.getResponsibleDomCount(); + int responsibleIPCount = serviceManager.getResponsibleIPCount(); +====1 +1:2112c + result.put("notifyTask", RaftCore.notifier.getTaskSize()); +2:1307a +3:1307a +====1 +1:2198c + DistroMapper.onReceiveServerStatus(serverStatus); +2:1393c +3:1393c + distroMapper.onReceiveServerStatus(serverStatus); +====1 +1:2209c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1404c +3:1404c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2229c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1424c +3:1424c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2237c + result.put("responsibleServer", DistroMapper.mapSrv(dom)); +2:1432c +3:1432c + result.put("responsibleServer", distroMapper.mapSrv(dom)); +====1 +1:2246c + result.put("healthyList", DistroMapper.getHealthyList()); +2:1441c +3:1441c + result.put("healthyList", distroMapper.getHealthyList()); +====1 +1:2256c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) domainsManager.getDomain(namespaceId, dom); +2:1451c +3:1451c + VirtualClusterDomain virtualClusterDomain = (VirtualClusterDomain) serviceManager.getService(namespaceId, dom); +====1 +1:2264c + result.put("responsible", DistroMapper.responsible(dom)); +2:1459c +3:1459c + result.put("responsible", distroMapper.responsible(dom)); +====1 +1:2275c + if (!NamingProxy.getServers().contains(serverIP)) { +2:1470c +3:1470c + if (!serverListManager.contains(serverIP)) { +====1 +1:2280c + DomainsManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, DomainsManager.DomainChecksum.class); +2:1475c +3:1475c + ServiceManager.DomainChecksum checksums = JSON.parseObject(domsStatusString, ServiceManager.DomainChecksum.class); +====1 +1:2292c + Domain domain = domainsManager.getDomain(checksums.namespaceId, dom); +2:1487c +3:1487c + Domain domain = serviceManager.getService(checksums.namespaceId, dom); +====1 +1:2305c + domainsManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +2:1500c +3:1500c + serviceManager.addUpdatedDom2Queue(checksums.namespaceId, dom, serverIP, checksum); +====1 +1:2354c + pac.put("checkServer", DistroMapper.mapSrvName(vDom.getName())); +2:1549c +3:1549c + pac.put("checkServer", distroMapper.mapSrvName(vDom.getName())); +====1 +1:2432,2433c + public void setDomainsManager(DomainsManager domainsManager) { + this.domainsManager = domainsManager; +2:1627,1628c +3:1627,1628c + public void setServiceManager(ServiceManager serviceManager) 
{ + this.serviceManager = serviceManager; diff --git a/src/python/merge_conflict_analysis_diffs/1329/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..1d5f75309c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,144 @@ +====1 +1:6c + 2.4.1-SNAPSHOT +2:6c +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:9,10c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:34,37c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:51,57c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:60,77c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:108,109c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:137,140c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:186,187c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:197a +3:197a +====1 +1:197a +2:224,232c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:251,279c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:280a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..bed4273286 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,151 @@ +==== +1:6c + 2.4.1-SNAPSHOT +2:6,12c + <<<<<<< HEAD + 3.4.1-SNAPSHOT + ||||||| a048d5e1d + 2.4.1-SNAPSHOT + ======= + 2.4.2-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:15,16c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:40,43c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:57,63c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:66,83c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:114,115c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:143,146c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:192,193c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:203a +3:197a +====1 +1:197a +2:230,238c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:257,285c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:286a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 
0000000000..1d5f75309c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,144 @@ +====1 +1:6c + 2.4.1-SNAPSHOT +2:6c +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:9,10c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:34,37c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:51,57c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:60,77c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:108,109c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:137,140c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:186,187c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:197a +3:197a +====1 +1:197a +2:224,232c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:251,279c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:280a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..bed4273286 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,151 @@ +==== +1:6c + 2.4.1-SNAPSHOT +2:6,12c + <<<<<<< HEAD + 3.4.1-SNAPSHOT + ||||||| a048d5e1d + 2.4.1-SNAPSHOT + ======= + 2.4.2-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:15,16c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:40,43c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:57,63c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:66,83c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:114,115c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:143,146c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:192,193c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:203a +3:197a +====1 +1:197a +2:230,238c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:257,285c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:286a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..1d5f75309c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,144 @@ +====1 +1:6c + 2.4.1-SNAPSHOT +2:6c +3:6c + 3.4.1-SNAPSHOT 
+====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:9,10c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:34,37c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:51,57c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:60,77c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:108,109c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:137,140c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:186,187c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:197a +3:197a +====1 +1:197a +2:224,232c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:251,279c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:280a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..1d5f75309c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,144 @@ +====1 +1:6c + 2.4.1-SNAPSHOT +2:6c +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:9,10c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:34,37c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:51,57c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:60,77c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:108,109c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:137,140c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:186,187c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:197a +3:197a +====1 +1:197a +2:224,232c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:251,279c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:280a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..bed4273286 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,151 @@ +==== +1:6c + 2.4.1-SNAPSHOT +2:6,12c + <<<<<<< HEAD + 3.4.1-SNAPSHOT + ||||||| a048d5e1d + 2.4.1-SNAPSHOT + ======= + 2.4.2-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:15,16c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a 
+2:40,43c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:57,63c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:66,83c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:114,115c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:143,146c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:192,193c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:203a +3:197a +====1 +1:197a +2:230,238c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:257,285c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:286a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..bed4273286 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,151 @@ +==== +1:6c + 2.4.1-SNAPSHOT +2:6,12c + <<<<<<< HEAD + 3.4.1-SNAPSHOT + ||||||| a048d5e1d + 2.4.1-SNAPSHOT + ======= + 2.4.2-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:15,16c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:40,43c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:57,63c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:66,83c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:114,115c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:143,146c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:192,193c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:203a +3:197a +====1 +1:197a +2:230,238c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:257,285c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:286a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..bed4273286 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,151 @@ +==== +1:6c + 2.4.1-SNAPSHOT +2:6,12c + <<<<<<< HEAD + 3.4.1-SNAPSHOT + ||||||| a048d5e1d + 2.4.1-SNAPSHOT + ======= + 2.4.2-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:15,16c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:40,43c +3:34,37c + + -Xdoclint:none + 1.6.4 
+ +====1 +1:47c + 2.2.2 +2:57,63c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:66,83c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:114,115c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:143,146c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:192,193c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:203a +3:197a +====1 +1:197a +2:230,238c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:257,285c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:286a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..bed4273286 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,151 @@ +==== +1:6c + 2.4.1-SNAPSHOT +2:6,12c + <<<<<<< HEAD + 3.4.1-SNAPSHOT + ||||||| a048d5e1d + 2.4.1-SNAPSHOT + ======= + 2.4.2-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:15,16c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:40,43c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:57,63c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:66,83c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:114,115c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:143,146c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:192,193c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:203a +3:197a +====1 +1:197a +2:230,238c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:257,285c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:286a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..bed4273286 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,151 @@ +==== +1:6c + 2.4.1-SNAPSHOT +2:6,12c + <<<<<<< HEAD + 3.4.1-SNAPSHOT + ||||||| a048d5e1d + 2.4.1-SNAPSHOT + ======= + 2.4.2-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:15,16c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:40,43c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:57,63c +3:51,57c + 3.2.0 + true + 
+ + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:66,83c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:114,115c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:143,146c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:192,193c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:203a +3:197a +====1 +1:197a +2:230,238c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:257,285c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:286a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..bed4273286 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/intellimerge/diff_pom.xml.txt @@ -0,0 +1,151 @@ +==== +1:6c + 2.4.1-SNAPSHOT +2:6,12c + <<<<<<< HEAD + 3.4.1-SNAPSHOT + ||||||| a048d5e1d + 2.4.1-SNAPSHOT + ======= + 2.4.2-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:15,16c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:40,43c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:57,63c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:66,83c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:114,115c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:143,146c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:192,193c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:203a +3:197a +====1 +1:197a +2:230,238c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:257,285c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:286a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1329/spork/diff_AbstractBDDSoftAssertions.java.txt b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_AbstractBDDSoftAssertions.java.txt new file mode 100644 index 0000000000..731310b9d2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_AbstractBDDSoftAssertions.java.txt @@ -0,0 +1,400 @@ +====1 +1:15c + import static org.assertj.core.api.Assertions.catchThrowable; +2:15c +3:15c + import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable; +====1 +1:22a +2:23,28c +3:23,28c + import java.time.LocalDate; + import java.time.LocalDateTime; + import java.time.LocalTime; + import java.time.OffsetDateTime; + import java.time.OffsetTime; + import java.time.ZonedDateTime; +====1 +1:26a +2:33,37c +3:33,37c + import java.util.Optional; + import java.util.OptionalDouble; + import java.util.OptionalInt; + import 
java.util.OptionalLong; + import java.util.concurrent.CompletableFuture; +====1 +1:33c + +2:44c +3:44c + +====1 +1:41c + return proxy(BigDecimalAssert.class, BigDecimal.class, actual); +2:52c +3:52c + return proxy(BigDecimalAssert.class, BigDecimal.class, actual); +====1 +1:51c + return proxy(BooleanAssert.class, Boolean.class, actual); +2:62c +3:62c + return proxy(BooleanAssert.class, Boolean.class, actual); +====1 +1:61c + return proxy(BooleanAssert.class, Boolean.class, actual); +2:72c +3:72c + return proxy(BooleanAssert.class, Boolean.class, actual); +====1 +1:71c + return proxy(BooleanArrayAssert.class, boolean[].class, actual); +2:82c +3:82c + return proxy(BooleanArrayAssert.class, boolean[].class, actual); +====1 +1:81c + return proxy(ByteAssert.class, Byte.class, actual); +2:92c +3:92c + return proxy(ByteAssert.class, Byte.class, actual); +====1 +1:91c + return proxy(ByteAssert.class, Byte.class, actual); +2:102c +3:102c + return proxy(ByteAssert.class, Byte.class, actual); +====1 +1:101c + return proxy(ByteArrayAssert.class, byte[].class, actual); +2:112c +3:112c + return proxy(ByteArrayAssert.class, byte[].class, actual); +====1 +1:111c + return proxy(CharacterAssert.class, Character.class, actual); +2:122c +3:122c + return proxy(CharacterAssert.class, Character.class, actual); +====1 +1:121c + return proxy(CharArrayAssert.class, char[].class, actual); +2:132c +3:132c + return proxy(CharArrayAssert.class, char[].class, actual); +====1 +1:131c + return proxy(CharacterAssert.class, Character.class, actual); +2:142c +3:142c + return proxy(CharacterAssert.class, Character.class, actual); +====1 +1:143c + return proxy(SoftAssertionClassAssert.class, Class.class, actual); +2:154c +3:154c + return proxy(SoftAssertionClassAssert.class, Class.class, actual); +====3 +1:155c +2:166c + return proxy(AbstractComparableAssert.class, Comparable.class, actual); +3:166c + return proxy(GenericComparableAssert.class, Comparable.class, actual); +====1 +1:166c + return proxy(IterableAssert.class, Iterable.class, actual); +2:177c +3:177c + return proxy(IterableAssert.class, Iterable.class, actual); +====1 +1:179c + return proxy(IterableAssert.class, Iterator.class, actual); +2:190c +3:190c + return proxy(IterableAssert.class, Iterator.class, actual); +====1 +1:189c + return proxy(DoubleAssert.class, Double.class, actual); +2:200c +3:200c + return proxy(DoubleAssert.class, Double.class, actual); +====1 +1:199c + return proxy(DoubleAssert.class, Double.class, actual); +2:210c +3:210c + return proxy(DoubleAssert.class, Double.class, actual); +====1 +1:209c + return proxy(DoubleArrayAssert.class, double[].class, actual); +2:220c +3:220c + return proxy(DoubleArrayAssert.class, double[].class, actual); +====1 +1:219c + return proxy(FileAssert.class, File.class, actual); +2:230c +3:230c + return proxy(FileAssert.class, File.class, actual); +====1 +1:239c + return proxy(InputStreamAssert.class, InputStream.class, actual); +2:250c +3:250c + return proxy(InputStreamAssert.class, InputStream.class, actual); +====1 +1:249c + return proxy(FloatAssert.class, Float.class, actual); +2:260c +3:260c + return proxy(FloatAssert.class, Float.class, actual); +====1 +1:259c + return proxy(FloatAssert.class, Float.class, actual); +2:270c +3:270c + return proxy(FloatAssert.class, Float.class, actual); +====1 +1:269c + return proxy(FloatArrayAssert.class, float[].class, actual); +2:280c +3:280c + return proxy(FloatArrayAssert.class, float[].class, actual); +====1 +1:279c + return proxy(IntegerAssert.class, Integer.class, 
actual); +2:290c +3:290c + return proxy(IntegerAssert.class, Integer.class, actual); +====1 +1:289c + return proxy(IntArrayAssert.class, int[].class, actual); +2:300c +3:300c + return proxy(IntArrayAssert.class, int[].class, actual); +====1 +1:299c + return proxy(IntegerAssert.class, Integer.class, actual); +2:310c +3:310c + return proxy(IntegerAssert.class, Integer.class, actual); +====1 +1:310c + return proxy(ListAssert.class, List.class, actual); +2:321c +3:321c + return proxy(ListAssert.class, List.class, actual); +====1 +1:320c + return proxy(LongAssert.class, Long.class, actual); +2:331c +3:331c + return proxy(LongAssert.class, Long.class, actual); +====1 +1:330c + return proxy(LongAssert.class, Long.class, actual); +2:341c +3:341c + return proxy(LongAssert.class, Long.class, actual); +====1 +1:340c + return proxy(LongArrayAssert.class, long[].class, actual); +2:351c +3:351c + return proxy(LongArrayAssert.class, long[].class, actual); +====1 +1:351c + return proxy(ObjectAssert.class, Object.class, actual); +2:362c +3:362c + return proxy(ObjectAssert.class, Object.class, actual); +====1 +1:362c + return proxy(ObjectArrayAssert.class, Object[].class, actual); +2:373c +3:373c + return proxy(ObjectArrayAssert.class, Object[].class, actual); +====1 +1:375c + return proxy(SoftAssertionMapAssert.class, Map.class, actual); +2:386c +3:386c + return proxy(SoftAssertionMapAssert.class, Map.class, actual); +====1 +1:385c + return proxy(ShortAssert.class, Short.class, actual); +2:396c +3:396c + return proxy(ShortAssert.class, Short.class, actual); +====1 +1:395c + return proxy(ShortAssert.class, Short.class, actual); +2:406c +3:406c + return proxy(ShortAssert.class, Short.class, actual); +====1 +1:405c + return proxy(ShortArrayAssert.class, short[].class, actual); +2:416c +3:416c + return proxy(ShortArrayAssert.class, short[].class, actual); +====1 +1:415c + return proxy(CharSequenceAssert.class, CharSequence.class, actual); +2:426c +3:426c + return proxy(CharSequenceAssert.class, CharSequence.class, actual); +====1 +1:425c + return proxy(StringAssert.class, String.class, actual); +2:436c +3:436c + return proxy(StringAssert.class, String.class, actual); +====1 +1:435c + return proxy(DateAssert.class, Date.class, actual); +2:446c +3:446c + return proxy(DateAssert.class, Date.class, actual); +====1 +1:445c + return proxy(ThrowableAssert.class, Throwable.class, actual); +2:456c +3:456c + return proxy(ThrowableAssert.class, Throwable.class, actual); +====1 +1:456c + * softly.thenThrownBy(() -> { throw new Exception("boom!") }).isInstanceOf(Exception.class) +2:467c +3:467c + * softly.thenThrownBy(() -> { throw new Exception("boom!"); }).isInstanceOf(Exception.class) +====1 +1:478c + +2:489,585c +3:489,585c + + /** + * Create assertion for {@link java.util.Optional}. + * + * @param actual the actual value. + * @param the type of the value contained in the {@link java.util.Optional}. + * + * @return the created assertion object. + */ + @SuppressWarnings("unchecked") + public OptionalAssert then(Optional actual) { + return proxy(OptionalAssert.class, Optional.class, actual); + } + + /** + * Create assertion for {@link java.util.OptionalDouble}. + * + * @param actual the actual value. + * + * @return the created assertion object. + */ + public OptionalDoubleAssert then(OptionalDouble actual) { + return proxy(OptionalDoubleAssert.class, OptionalDouble.class, actual); + } + + /** + * Create assertion for {@link java.util.OptionalInt}. + * + * @param actual the actual value. 
+ * + * @return the created assertion object. + */ + public OptionalIntAssert then(OptionalInt actual) { + return proxy(OptionalIntAssert.class, OptionalInt.class, actual); + } + + /** + * Create assertion for {@link java.util.OptionalLong}. + * + * @param actual the actual value. + * + * @return the created assertion object. + */ + public OptionalLongAssert then(OptionalLong actual) { + return proxy(OptionalLongAssert.class, OptionalLong.class, actual); + } + + /** + * Creates a new instance of {@link LocalDateAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public LocalDateAssert then(LocalDate actual) { + return proxy(LocalDateAssert.class, LocalDate.class, actual); + } + + /** + * Creates a new instance of {@link LocalDateTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public LocalDateTimeAssert then(LocalDateTime actual) { + return proxy(LocalDateTimeAssert.class, LocalDateTime.class, actual); + } + + /** + * Creates a new instance of {@link ZonedDateTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public ZonedDateTimeAssert then(ZonedDateTime actual) { + return proxy(ZonedDateTimeAssert.class, ZonedDateTime.class, actual); + } + + /** + * Creates a new instance of {@link LocalTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public LocalTimeAssert then(LocalTime actual) { + return proxy(LocalTimeAssert.class, LocalTime.class, actual); + } + + /** + * Creates a new instance of {@link OffsetTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public OffsetTimeAssert then(OffsetTime actual) { + return proxy(OffsetTimeAssert.class, OffsetTime.class, actual); + } + +====1 +1:497a +2:605,627c +3:605,627c + + /** + * Creates a new instance of {@link OffsetDateTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public OffsetDateTimeAssert then(OffsetDateTime actual) { + return proxy(OffsetDateTimeAssert.class, OffsetDateTime.class, actual); + } + + /** + * Create assertion for {@link java.util.concurrent.CompletableFuture}. + * + * @param future the actual value. + * @param the type of the value contained in the {@link java.util.concurrent.CompletableFuture}. + * + * @return the created assertion object. 
+ */ + @SuppressWarnings("unchecked") + public CompletableFutureAssert then(CompletableFuture actual) { + return proxy(CompletableFutureAssert.class, CompletableFuture.class, actual); + } diff --git a/src/python/merge_conflict_analysis_diffs/1329/spork/diff_AbstractStandardSoftAssertions.java.txt b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_AbstractStandardSoftAssertions.java.txt new file mode 100644 index 0000000000..3243388634 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_AbstractStandardSoftAssertions.java.txt @@ -0,0 +1,168 @@ +====1 +1:15c + import static org.assertj.core.api.Assertions.catchThrowable; +2:15c +3:15c + import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable; +====1 +1:22a +2:23,28c +3:23,28c + import java.time.LocalDate; + import java.time.LocalDateTime; + import java.time.LocalTime; + import java.time.OffsetDateTime; + import java.time.OffsetTime; + import java.time.ZonedDateTime; +====1 +1:26a +2:33,37c +3:33,37c + import java.util.Optional; + import java.util.OptionalDouble; + import java.util.OptionalInt; + import java.util.OptionalLong; + import java.util.concurrent.CompletableFuture; +====3 +1:155c +2:166c + return proxy(AbstractComparableAssert.class, Comparable.class, actual); +3:166c + return proxy(GenericComparableAssert.class, Comparable.class, actual); +====1 +1:455c + * softly.assertThatThrownBy(() -> { throw new Exception("boom!") }).isInstanceOf(Exception.class) +2:466c +3:466c + * softly.assertThatThrownBy(() -> { throw new Exception("boom!"); }).isInstanceOf(Exception.class) +====1 +1:476a +2:488,573c +3:488,573c + + /** + * Create assertion for {@link java.util.Optional}. + * + * @param actual the actual value. + * @param the type of the value contained in the {@link java.util.Optional}. + * + * @return the created assertion object. + */ + @SuppressWarnings("unchecked") + public OptionalAssert assertThat(Optional actual) { + return proxy(OptionalAssert.class, Optional.class, actual); + } + + /** + * Create assertion for {@link java.util.OptionalDouble}. + * + * @param actual the actual value. + * + * @return the created assertion object. + */ + public OptionalDoubleAssert assertThat(OptionalDouble actual) { + return proxy(OptionalDoubleAssert.class, OptionalDouble.class, actual); + } + + /** + * Create assertion for {@link java.util.OptionalLong}. + * + * @param actual the actual value. + * + * @return the created assertion object. + */ + public OptionalLongAssert assertThat(OptionalLong actual) { + return proxy(OptionalLongAssert.class, OptionalLong.class, actual); + } + + /** + * Create assertion for {@link java.util.OptionalInt}. + * + * @param actual the actual value. + * + * @return the created assertion object. + */ + public OptionalIntAssert assertThat(OptionalInt actual) { + return proxy(OptionalIntAssert.class, OptionalInt.class, actual); + } + + /** + * Creates a new instance of {@link LocalDateAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public LocalDateAssert assertThat(LocalDate actual) { + return proxy(LocalDateAssert.class, LocalDate.class, actual); + } + + /** + * Creates a new instance of {@link LocalDateTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public LocalDateTimeAssert assertThat(LocalDateTime actual) { + return proxy(LocalDateTimeAssert.class, LocalDateTime.class, actual); + } + + /** + * Creates a new instance of {@link ZonedDateTimeAssert}. 
+ * + * @param actual the actual value. + * @return the created assertion object. + */ + public ZonedDateTimeAssert assertThat(ZonedDateTime actual) { + return proxy(ZonedDateTimeAssert.class, ZonedDateTime.class, actual); + } + + /** + * Creates a new instance of {@link LocalTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public LocalTimeAssert assertThat(LocalTime actual) { + return proxy(LocalTimeAssert.class, LocalTime.class, actual); + } +====1 +1:478a +2:576,595c +3:576,595c + * Creates a new instance of {@link OffsetTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public OffsetTimeAssert assertThat(OffsetTime actual) { + return proxy(OffsetTimeAssert.class, OffsetTime.class, actual); + } + + /** + * Creates a new instance of {@link OffsetDateTimeAssert}. + * + * @param actual the actual value. + * @return the created assertion object. + */ + public OffsetDateTimeAssert assertThat(OffsetDateTime actual) { + return proxy(OffsetDateTimeAssert.class, OffsetDateTime.class, actual); + } + + /** +====1 +1:497a +2:615,627c +3:615,627c + /** + * Create assertion for {@link java.util.concurrent.CompletableFuture}. + * + * @param future the actual value. + * @param the type of the value contained in the {@link java.util.concurrent.CompletableFuture}. + * + * @return the created assertion object. + */ + @SuppressWarnings("unchecked") + public CompletableFutureAssert assertThat(CompletableFuture actual) { + return proxy(CompletableFutureAssert.class, CompletableFuture.class, actual); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1329/spork/diff_BDDSoftAssertionsTest.java.txt b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_BDDSoftAssertionsTest.java.txt new file mode 100644 index 0000000000..e94d3c1ba1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_BDDSoftAssertionsTest.java.txt @@ -0,0 +1,88 @@ +====1 +1:23a +2:24,28c +3:24,28c + import java.time.LocalDateTime; + import java.time.LocalTime; + import java.time.OffsetDateTime; + import java.time.OffsetTime; + import java.time.ZoneOffset; +====1 +1:24a +2:30,33c +3:30,33c + import java.util.Optional; + import java.util.OptionalDouble; + import java.util.OptionalInt; + import java.util.OptionalLong; +====3 +1:26a +2:35a +3:36c + import org.assertj.core.api.test.ComparableExample; +====1 +1:140a +2:150,154c +3:151,155c + softly.then(Optional.of("not empty")).isEqualTo("empty"); + // TODO should be caught : softly.assertThat(Optional.of("not empty")).isEmpty(); + softly.then(OptionalInt.of(0)).isEqualTo(1); + softly.then(OptionalDouble.of(0.0)).isEqualTo(1.0); + softly.then(OptionalLong.of(0L)).isEqualTo(1L); +====1 +1:141a +2:156,161c +3:157,162c + + softly.then(LocalTime.of(12, 0)).isEqualTo(LocalTime.of(13, 0)); + softly.then(OffsetTime.of(12, 0, 0, 0, ZoneOffset.UTC)).isEqualTo(OffsetTime.of(13, 0, 0, 0, ZoneOffset.UTC)); + softly.then(OffsetDateTime.MIN).isEqualTo(LocalDateTime.MAX); + // softly.then(completedFuture("done")).hasFailed(); + +====1 +1:142a +2:163c +3:164c + +====1 +1:143a +2:165c +3:166c + +====1 +1:146c + assertThat(errors).hasSize(40); +2:168,169c +3:169,170c + assertThat(errors).hasSize(47); + +====1 +1:219c + assertThat(errors.get(39)).contains(String.format("%nExpecting port of")); +2:241a +3:242a +====1 +1:220a +2:243,251c +3:244,252c + assertThat(errors.get(39)).isEqualTo("expected:<[\"empty\"]> but was:<[Optional[not empty]]>"); + 
assertThat(errors.get(40)).isEqualTo("expected:<[1]> but was:<[OptionalInt[0]]>"); + assertThat(errors.get(41)).isEqualTo("expected:<[1.0]> but was:<[OptionalDouble[0.0]]>"); + assertThat(errors.get(42)).isEqualTo("expected:<[1L]> but was:<[OptionalLong[0]]>"); + assertThat(errors.get(43)).contains(String.format("%nExpecting port of")); + assertThat(errors.get(44)).isEqualTo("expected:<1[3]:00> but was:<1[2]:00>"); + assertThat(errors.get(45)).isEqualTo("expected:<1[3]:00Z> but was:<1[2]:00Z>"); + assertThat(errors.get(46)).isEqualTo("expected:<[+999999999-12-31T23:59:59.999999999]> but was:<[-999999999-01-01T00:00+18:00]>"); + // assertThat(errors.get(47)).isEqualTo(""); +====3 +1:223a +2:254a +3:256,263c + @Test + public void should_work_with_comparable() throws Exception { + ComparableExample example1 = new ComparableExample(0); + ComparableExample example2 = new ComparableExample(0); + softly.then(example1).isEqualByComparingTo(example2); + softly.assertAll(); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1329/spork/diff_SoftAssertionsTest.java.txt b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_SoftAssertionsTest.java.txt new file mode 100644 index 0000000000..f16f29b4ab --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_SoftAssertionsTest.java.txt @@ -0,0 +1,92 @@ +====1 +1:27a +2:28,30c +3:28,30c + import java.time.LocalTime; + import java.time.OffsetTime; + import java.time.ZoneOffset; +====1 +1:30a +2:34,38c +3:34,38c + import java.util.Optional; + import java.util.OptionalDouble; + import java.util.OptionalInt; + import java.util.OptionalLong; + import java.util.concurrent.CompletableFuture; +====3 +1:33a +2:41a +3:42c + import org.assertj.core.api.test.ComparableExample; +====1 +1:190a +2:199c +3:200c + +====1 +1:191a +2:201,208c +3:202,209c + + softly.assertThat(LocalTime.of(12, 00)).isEqualTo(LocalTime.of(13,00)); + softly.assertThat(OffsetTime.of(12, 0, 0, 0, ZoneOffset.UTC)).isEqualTo(OffsetTime.of(13, 0, 0, 0, ZoneOffset.UTC)); + + softly.assertThat(Optional.of("not empty")).isEqualTo("empty"); + softly.assertThat(OptionalInt.of(0)).isEqualTo(1); + softly.assertThat(OptionalDouble.of(0.0)).isEqualTo(1.0); + softly.assertThat(OptionalLong.of(0L)).isEqualTo(1L); +====1 +1:192a +2:210c +3:211c + softly.assertThat(CompletableFuture.completedFuture("done")).hasFailed(); +====1 +1:198c + assertThat(errors).hasSize(41); +2:216c +3:217c + assertThat(errors).hasSize(48); +====1 +1:272,277c + + " <{\"54\"=\"55\"}>%n" + + "to contain:%n" + + " <[MapEntry[key=\"1\", value=\"2\"]]>%n" + + "but could not find:%n" + + " <[MapEntry[key=\"1\", value=\"2\"]]>%n")); + assertThat(errors.get(40)).contains("Expecting port of"); +2:290,304c +3:291,305c + + " <{\"54\"=\"55\"}>%n" + + "to contain:%n" + + " <[MapEntry[key=\"1\", value=\"2\"]]>%n" + + "but could not find:%n" + + " <[MapEntry[key=\"1\", value=\"2\"]]>%n")); + + assertThat(errors.get(40)).isEqualTo("expected:<1[3]:00> but was:<1[2]:00>"); + assertThat(errors.get(41)).isEqualTo("expected:<1[3]:00Z> but was:<1[2]:00Z>"); + + assertThat(errors.get(42)).isEqualTo("expected:<[\"empty\"]> but was:<[Optional[not empty]]>"); + assertThat(errors.get(43)).isEqualTo("expected:<[1]> but was:<[OptionalInt[0]]>"); + assertThat(errors.get(44)).isEqualTo("expected:<[1.0]> but was:<[OptionalDouble[0.0]]>"); + assertThat(errors.get(45)).isEqualTo("expected:<[1L]> but was:<[OptionalLong[0]]>"); + assertThat(errors.get(46)).contains("Expecting port of"); + assertThat(errors.get(47)).contains("to have 
failed"); +====1 +1:279c + } +2:306c +3:307c + } +====3 +1:523a +2:550a +3:552,559c + @Test + public void should_work_with_comparable() throws Exception { + ComparableExample example1 = new ComparableExample(0); + ComparableExample example2 = new ComparableExample(0); + softly.assertThat(example1).isEqualByComparingTo(example2); + softly.assertAll(); + } + diff --git a/src/python/merge_conflict_analysis_diffs/1329/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..1d5f75309c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1329/spork/diff_pom.xml.txt @@ -0,0 +1,144 @@ +====1 +1:6c + 2.4.1-SNAPSHOT +2:6c +3:6c + 3.4.1-SNAPSHOT +====1 +1:9,10c + Rich and fluent assertions for testing + 2013 +2:9,10c +3:9,10c + Rich and fluent assertions for testing for Java + 2014 +====1 +1:33a +2:34,37c +3:34,37c + + -Xdoclint:none + 1.6.4 + +====1 +1:47c + 2.2.2 +2:51,57c +3:51,57c + 3.2.0 + true + + + org.ow2.asm + asm + 5.0.4 +====1 +1:49a +2:60,77c +3:60,77c + + org.powermock + powermock-module-junit4 + ${powermock.version} + test + + + org.powermock + powermock-api-mockito + ${powermock.version} + test + + + org.easymock + easymock + 3.4 + test + +====1 +1:80,81c + 1.7 + 1.7 +2:108,109c +3:108,109c + 1.8 + 1.8 +====1 +1:108a +2:137,140c +3:137,140c + + org.objectweb.asm + org.assertj.core.internal.asm + +====1 +1:154,158c + + org.assertj.core.* + + + JavaSE-1.7 +2:186,187c +3:186,187c + org.assertj.core.* + JavaSE-1.8 +====1 +1:169,171c + + **/*cglib*/** + +2:197a +3:197a +====1 +1:197a +2:224,232c +3:224,232c + org.codehaus.mojo + animal-sniffer-maven-plugin + 1.14 + + + true + + + +====1 +1:215a +2:251,279c +3:251,279c + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + com.mycila + license-maven-plugin + [2.6,) + + format + + + + + + + + + + + + +====1 +1:217,220c + + + + +2:280a +3:280a diff --git a/src/python/merge_conflict_analysis_diffs/1442/git_hires_merge/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/git_hires_merge/diff_Categories.java.txt new file mode 100644 index 0000000000..b23c05a8a2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/git_hires_merge/diff_Categories.java.txt @@ -0,0 +1,72 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====3 +1:318c +2:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:341,369c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort/diff_Categories.java.txt new file mode 100644 index 0000000000..9bdbea834f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort/diff_Categories.java.txt @@ -0,0 +1,104 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====1 +1:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +2:317a +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:340,400c + <<<<<<< HEAD + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ||||||| b03c6a529 + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_adjacent/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_adjacent/diff_Categories.java.txt new file mode 100644 index 0000000000..b23c05a8a2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_adjacent/diff_Categories.java.txt @@ -0,0 +1,72 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====3 +1:318c +2:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:341,369c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_ignorespace/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_ignorespace/diff_Categories.java.txt new file mode 100644 index 0000000000..9bdbea834f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_ignorespace/diff_Categories.java.txt @@ -0,0 +1,104 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====1 +1:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +2:317a +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:340,400c + <<<<<<< HEAD + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ||||||| b03c6a529 + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_imports/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_imports/diff_Categories.java.txt new file mode 100644 index 0000000000..b23c05a8a2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_imports/diff_Categories.java.txt @@ -0,0 +1,72 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====3 +1:318c +2:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:341,369c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_imports_ignorespace/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_imports_ignorespace/diff_Categories.java.txt new file mode 100644 index 0000000000..b23c05a8a2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_ort_imports_ignorespace/diff_Categories.java.txt @@ -0,0 +1,72 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====3 +1:318c +2:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:341,369c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_histogram/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_histogram/diff_Categories.java.txt new file mode 100644 index 0000000000..9bdbea834f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_histogram/diff_Categories.java.txt @@ -0,0 +1,104 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====1 +1:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +2:317a +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:340,400c + <<<<<<< HEAD + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ||||||| b03c6a529 + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_ignorespace/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_ignorespace/diff_Categories.java.txt new file mode 100644 index 0000000000..9bdbea834f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_ignorespace/diff_Categories.java.txt @@ -0,0 +1,104 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====1 +1:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +2:317a +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:340,400c + <<<<<<< HEAD + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ||||||| b03c6a529 + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_minimal/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_minimal/diff_Categories.java.txt new file mode 100644 index 0000000000..9bdbea834f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_minimal/diff_Categories.java.txt @@ -0,0 +1,104 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====1 +1:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +2:317a +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:340,400c + <<<<<<< HEAD + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ||||||| b03c6a529 + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_myers/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_myers/diff_Categories.java.txt new file mode 100644 index 0000000000..9bdbea834f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_myers/diff_Categories.java.txt @@ -0,0 +1,104 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====1 +1:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +2:317a +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:340,400c + <<<<<<< HEAD + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ||||||| b03c6a529 + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_patience/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_patience/diff_Categories.java.txt new file mode 100644 index 0000000000..9bdbea834f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/gitmerge_recursive_patience/diff_Categories.java.txt @@ -0,0 +1,104 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====1 +1:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +2:317a +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:340,400c + <<<<<<< HEAD + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ||||||| b03c6a529 + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/intellimerge/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/intellimerge/diff_Categories.java.txt new file mode 100644 index 0000000000..9bdbea834f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/intellimerge/diff_Categories.java.txt @@ -0,0 +1,104 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====1 +1:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +2:317a +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:340,400c + <<<<<<< HEAD + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ||||||| b03c6a529 + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1442/spork/diff_Categories.java.txt b/src/python/merge_conflict_analysis_diffs/1442/spork/diff_Categories.java.txt new file mode 100644 index 0000000000..b23c05a8a2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1442/spork/diff_Categories.java.txt @@ -0,0 +1,72 @@ +====1 +1:287c + HashSet> c= new HashSet>(); +2:287c +3:287c + Set> c= new HashSet>(); +====3 +1:318c +2:318c + assertNoCategorizedDescendentsOfUncategorizeableParents(getDescription()); +3:317a +==== +1:341,368c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (!canHaveCategorizedChildren(description)) { + assertNoDescendantsHaveCategoryAnnotations(description); + } + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. 
+ private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +2:341,369c + private static void assertNoCategorizedDescendentsOfUncategorizeableParents(Description description) throws InitializationError { + if (canHaveCategorizedChildren(description)) { + for (Description each : description.getChildren()) { + assertNoCategorizedDescendentsOfUncategorizeableParents(each); + } + } else { + assertNoDescendantsHaveCategoryAnnotations(description); + } + } + + private static void assertNoDescendantsHaveCategoryAnnotations(Description description) throws InitializationError { + for (Description each : description.getChildren()) { + if (each.getAnnotation(Category.class) != null) { + throw new InitializationError("Category annotations on Parameterized classes are not supported on individual methods."); + } + assertNoDescendantsHaveCategoryAnnotations(each); + } + } + + // If children have names like [0], our current magical category code can't determine their parentage. + private static boolean canHaveCategorizedChildren(Description description) { + for (Description each : description.getChildren()) { + if (each.getTestClass() == null) { + return false; + } + } + return true; + } + +3:339a diff --git a/src/python/merge_conflict_analysis_diffs/1444/git_hires_merge/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/git_hires_merge/diff_AllTests.java.txt new file mode 100644 index 0000000000..3a80709a70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/git_hires_merge/diff_AllTests.java.txt @@ -0,0 +1,424 @@ +====3 +1:5c +2:5c + import junit.samples.money.MoneyTest; +3:4a +====3 +1:7,16c +2:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====3 +1:18,21c +2:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import 
org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import 
org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:24,120c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import 
org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import 
org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====3 +1:120,123c +2:122,125c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:128,169c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + 
TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====3 +1:167,238c +2:171,242c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +3:43c + ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/git_hires_merge/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/git_hires_merge/diff_JUnit4TestAdapter.java.txt new file 
mode 100644 index 0000000000..45b245791d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/git_hires_merge/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,37 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18c + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    <p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:88,91c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort/diff_AllTests.java.txt new file mode 100644 index 0000000000..fe14d49aba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort/diff_AllTests.java.txt @@ -0,0 +1,593 @@ +====1 +1:5c + import junit.samples.money.MoneyTest; +2:4a +3:4a +====1 +1:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +2:6,8c +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====1 +1:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +2:10c +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:13,219c + <<<<<<< HEAD + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ||||||| 9c337dcbb + import org.junit.runners.model.FrameworkFieldTest; + import 
org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import 
org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ======= + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; + >>>>>>> TEMP_RIGHT_BRANCH +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import 
org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====1 +1:120,123c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +2:220a +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:223,323c + <<<<<<< HEAD + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ||||||| 9c337dcbb + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + 
InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ======= + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====1 +1:167,238c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +2:325c +3:43c 
+ ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..203ee36854 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,53 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18,34c + <<<<<<< HEAD + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { + ||||||| 9c337dcbb + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + ======= + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + >>>>>>> TEMP_RIGHT_BRANCH +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:104,107c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_adjacent/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_adjacent/diff_AllTests.java.txt new file mode 100644 index 0000000000..3a80709a70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_adjacent/diff_AllTests.java.txt @@ -0,0 +1,424 @@ +====3 +1:5c +2:5c + import junit.samples.money.MoneyTest; +3:4a +====3 +1:7,16c +2:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====3 +1:18,21c +2:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:24,120c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +3:13,23c + import org.junit.samples.AllSamplesTests; + import 
org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====3 +1:120,123c +2:122,125c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:128,169c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + 
AllValidatorTests.class, +====3 +1:167,238c +2:171,242c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +3:43c + ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_adjacent/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_adjacent/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..45b245791d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_adjacent/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,37 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18c + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:88,91c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_ignorespace/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_ignorespace/diff_AllTests.java.txt new file mode 100644 index 0000000000..fe14d49aba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_ignorespace/diff_AllTests.java.txt @@ -0,0 +1,593 @@ +====1 +1:5c + import junit.samples.money.MoneyTest; +2:4a +3:4a +====1 +1:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +2:6,8c +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====1 +1:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +2:10c +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:13,219c + <<<<<<< HEAD + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ||||||| 9c337dcbb + import org.junit.runners.model.FrameworkFieldTest; + import 
org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import 
org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ======= + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; + >>>>>>> TEMP_RIGHT_BRANCH +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import 
org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====1 +1:120,123c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +2:220a +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:223,323c + <<<<<<< HEAD + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ||||||| 9c337dcbb + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + 
InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ======= + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====1 +1:167,238c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +2:325c +3:43c 
+ ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_ignorespace/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_ignorespace/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..203ee36854 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_ignorespace/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,53 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18,34c + <<<<<<< HEAD + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { + ||||||| 9c337dcbb + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + ======= + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *
    <p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + >>>>>>> TEMP_RIGHT_BRANCH +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *
    <p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:104,107c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports/diff_AllTests.java.txt new file mode 100644 index 0000000000..3a80709a70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports/diff_AllTests.java.txt @@ -0,0 +1,424 @@ +====3 +1:5c +2:5c + import junit.samples.money.MoneyTest; +3:4a +====3 +1:7,16c +2:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====3 +1:18,21c +2:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:24,120c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +3:13,23c + import org.junit.samples.AllSamplesTests; + import 
org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====3 +1:120,123c +2:122,125c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:128,169c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + 
AllValidatorTests.class, +====3 +1:167,238c +2:171,242c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +3:43c + ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..45b245791d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,37 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18c + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *
    <p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:88,91c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports_ignorespace/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports_ignorespace/diff_AllTests.java.txt new file mode 100644 index 0000000000..3a80709a70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports_ignorespace/diff_AllTests.java.txt @@ -0,0 +1,424 @@ +====3 +1:5c +2:5c + import junit.samples.money.MoneyTest; +3:4a +====3 +1:7,16c +2:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====3 +1:18,21c +2:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + 
import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:24,120c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +3:13,23c + import org.junit.samples.AllSamplesTests; + import 
org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====3 +1:120,123c +2:122,125c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:128,169c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + 
AllValidatorTests.class, +====3 +1:167,238c +2:171,242c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +3:43c + ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports_ignorespace/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports_ignorespace/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..45b245791d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_ort_imports_ignorespace/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,37 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18c + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *
    <p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:88,91c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_histogram/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_histogram/diff_AllTests.java.txt new file mode 100644 index 0000000000..fe14d49aba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_histogram/diff_AllTests.java.txt @@ -0,0 +1,593 @@ +====1 +1:5c + import junit.samples.money.MoneyTest; +2:4a +3:4a +====1 +1:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +2:6,8c +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====1 +1:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +2:10c +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:13,219c + <<<<<<< HEAD + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ||||||| 9c337dcbb + import org.junit.runners.model.FrameworkFieldTest; + import 
org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import 
org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ======= + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; + >>>>>>> TEMP_RIGHT_BRANCH +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import 
org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====1 +1:120,123c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +2:220a +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:223,323c + <<<<<<< HEAD + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ||||||| 9c337dcbb + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + 
InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ======= + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====1 +1:167,238c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +2:325c +3:43c 
+ ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_histogram/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_histogram/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..203ee36854 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_histogram/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,53 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18,34c + <<<<<<< HEAD + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { + ||||||| 9c337dcbb + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + ======= + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + >>>>>>> TEMP_RIGHT_BRANCH +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:104,107c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_ignorespace/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_ignorespace/diff_AllTests.java.txt new file mode 100644 index 0000000000..fe14d49aba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_ignorespace/diff_AllTests.java.txt @@ -0,0 +1,593 @@ +====1 +1:5c + import junit.samples.money.MoneyTest; +2:4a +3:4a +====1 +1:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +2:6,8c +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====1 +1:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +2:10c +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:13,219c + <<<<<<< HEAD + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ||||||| 9c337dcbb + import org.junit.runners.model.FrameworkFieldTest; + import 
org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import 
org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ======= + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; + >>>>>>> TEMP_RIGHT_BRANCH +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import 
org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====1 +1:120,123c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +2:220a +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:223,323c + <<<<<<< HEAD + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ||||||| 9c337dcbb + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + 
InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ======= + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====1 +1:167,238c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +2:325c +3:43c 
+ ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_ignorespace/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_ignorespace/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..203ee36854 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_ignorespace/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,53 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18,34c + <<<<<<< HEAD + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { + ||||||| 9c337dcbb + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + ======= + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + >>>>>>> TEMP_RIGHT_BRANCH +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:104,107c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_minimal/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_minimal/diff_AllTests.java.txt new file mode 100644 index 0000000000..fe14d49aba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_minimal/diff_AllTests.java.txt @@ -0,0 +1,593 @@ +====1 +1:5c + import junit.samples.money.MoneyTest; +2:4a +3:4a +====1 +1:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +2:6,8c +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====1 +1:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +2:10c +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:13,219c + <<<<<<< HEAD + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ||||||| 9c337dcbb + import org.junit.runners.model.FrameworkFieldTest; + import 
org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import 
org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ======= + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; + >>>>>>> TEMP_RIGHT_BRANCH +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import 
org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====1 +1:120,123c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +2:220a +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:223,323c + <<<<<<< HEAD + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ||||||| 9c337dcbb + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + 
InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ======= + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====1 +1:167,238c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +2:325c +3:43c 
+ ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_minimal/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_minimal/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..203ee36854 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_minimal/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,53 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18,34c + <<<<<<< HEAD + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { + ||||||| 9c337dcbb + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + ======= + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

<p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
+ </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + >>>>>>> TEMP_RIGHT_BRANCH +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

<p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
+ </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:104,107c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_myers/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_myers/diff_AllTests.java.txt new file mode 100644 index 0000000000..fe14d49aba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_myers/diff_AllTests.java.txt @@ -0,0 +1,593 @@ +====1 +1:5c + import junit.samples.money.MoneyTest; +2:4a +3:4a +====1 +1:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +2:6,8c +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====1 +1:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +2:10c +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:13,219c + <<<<<<< HEAD + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ||||||| 9c337dcbb + import org.junit.runners.model.FrameworkFieldTest; + import 
org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import 
org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ======= + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; + >>>>>>> TEMP_RIGHT_BRANCH +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import 
org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====1 +1:120,123c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +2:220a +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:223,323c + <<<<<<< HEAD + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ||||||| 9c337dcbb + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + 
InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ======= + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====1 +1:167,238c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +2:325c +3:43c 
+ ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_myers/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_myers/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..203ee36854 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_myers/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,53 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18,34c + <<<<<<< HEAD + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { + ||||||| 9c337dcbb + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + ======= + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

<p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
+ </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + >>>>>>> TEMP_RIGHT_BRANCH +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

<p>To use it, add the following to a test class: + * <pre>
    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
+ </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:104,107c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_patience/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_patience/diff_AllTests.java.txt new file mode 100644 index 0000000000..fe14d49aba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_patience/diff_AllTests.java.txt @@ -0,0 +1,593 @@ +====1 +1:5c + import junit.samples.money.MoneyTest; +2:4a +3:4a +====1 +1:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +2:6,8c +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====1 +1:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +2:10c +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:13,219c + <<<<<<< HEAD + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ||||||| 9c337dcbb + import org.junit.runners.model.FrameworkFieldTest; + import 
org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import 
org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ======= + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; + >>>>>>> TEMP_RIGHT_BRANCH +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import 
org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====1 +1:120,123c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +2:220a +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:223,323c + <<<<<<< HEAD + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ||||||| 9c337dcbb + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + 
InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ======= + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====1 +1:167,238c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +2:325c +3:43c 
+ ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_patience/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_patience/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..203ee36854 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/gitmerge_recursive_patience/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,53 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18,34c + <<<<<<< HEAD + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { + ||||||| 9c337dcbb + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + ======= + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + >>>>>>> TEMP_RIGHT_BRANCH +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:104,107c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/intellimerge/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/intellimerge/diff_AllTests.java.txt new file mode 100644 index 0000000000..fe14d49aba --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/intellimerge/diff_AllTests.java.txt @@ -0,0 +1,593 @@ +====1 +1:5c + import junit.samples.money.MoneyTest; +2:4a +3:4a +====1 +1:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +2:6,8c +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====1 +1:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +2:10c +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import 
org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import 
org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:13,219c + <<<<<<< HEAD + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import 
org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ||||||| 9c337dcbb + import org.junit.runners.model.FrameworkFieldTest; + import 
org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import 
org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; + ======= + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; + >>>>>>> TEMP_RIGHT_BRANCH +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import 
org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====1 +1:120,123c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +2:220a +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:223,323c + <<<<<<< HEAD + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ||||||| 9c337dcbb + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + 
InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, + ======= + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, + >>>>>>> TEMP_RIGHT_BRANCH +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====1 +1:167,238c + ExperimentalTests.class, + InheritedTestTest.class, + TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +2:325c +3:43c 
+ ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/intellimerge/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/intellimerge/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..203ee36854 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/intellimerge/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,53 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18,34c + <<<<<<< HEAD + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { + ||||||| 9c337dcbb + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + ======= + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { + >>>>>>> TEMP_RIGHT_BRANCH +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

    To use it, add the following to a test class: + *

    +        public static Test suite() {
    +          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
    +        }
    +  
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:104,107c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1444/spork/diff_AllTests.java.txt b/src/python/merge_conflict_analysis_diffs/1444/spork/diff_AllTests.java.txt new file mode 100644 index 0000000000..3a80709a70 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/spork/diff_AllTests.java.txt @@ -0,0 +1,424 @@ +====3 +1:5c +2:5c + import junit.samples.money.MoneyTest; +3:4a +====3 +1:7,16c +2:7,16c + import org.junit.experimental.categories.CategoryFilterFactoryTest; + import org.junit.internal.MethodSorterTest; + import org.junit.internal.matchers.StacktracePrintingMatcherTest; + import org.junit.internal.matchers.ThrowableCauseMatcherTest; + import org.junit.rules.DisableOnDebugTest; + import org.junit.rules.StopwatchTest; + import org.junit.runner.FilterFactoriesTest; + import org.junit.runner.FilterOptionIntegrationTest; + import org.junit.runner.JUnitCommandLineParseResultTest; + import org.junit.runner.JUnitCoreTest; +3:6,8c + import org.junit.internal.AllInternalTests; + import org.junit.rules.AllRulesTests; + import org.junit.runner.AllRunnerTests; +====3 +1:18,21c +2:18,21c + import org.junit.runner.notification.ConcurrentRunNotifierTest; + import org.junit.runner.notification.RunNotifierTest; + import org.junit.runner.notification.SynchronizedRunListenerTest; + import org.junit.runners.CustomBlockJUnit4ClassRunnerTest; +3:10c + import org.junit.runners.AllRunnersTests; +==== +1:24,118c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import 
org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import 
org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +2:24,120c + import org.junit.runners.model.FrameworkFieldTest; + import org.junit.runners.model.FrameworkMethodTest; + import org.junit.runners.model.TestClassTest; + import org.junit.runners.parameterized.ParameterizedNamesTest; + import org.junit.runners.parameterized.TestWithParametersTest; + import org.junit.tests.assertion.AssertionTest; + import org.junit.tests.assertion.ComparisonFailureTest; + import org.junit.tests.assertion.MultipleFailureExceptionTest; + import org.junit.tests.deprecated.JUnit4ClassRunnerTest; + import org.junit.tests.description.AnnotatedDescriptionTest; + import org.junit.tests.description.SuiteDescriptionTest; + import org.junit.tests.description.TestDescriptionMethodNameTest; + import org.junit.tests.description.TestDescriptionTest; + import org.junit.tests.experimental.AssumptionTest; + import org.junit.tests.experimental.ExperimentalTests; + import org.junit.tests.experimental.MatcherTest; + import org.junit.tests.experimental.categories.CategoriesAndParameterizedTest; + import org.junit.tests.experimental.categories.CategoryTest; + import org.junit.tests.experimental.categories.CategoryValidatorTest; + import org.junit.tests.experimental.categories.JavadocTest; + import org.junit.tests.experimental.categories.MultiCategoryTest; + import org.junit.tests.experimental.max.DescriptionTest; + import org.junit.tests.experimental.max.JUnit38SortingTest; + import org.junit.tests.experimental.max.MaxStarterTest; + import org.junit.tests.experimental.parallel.ParallelClassTest; + import org.junit.tests.experimental.parallel.ParallelMethodTest; + import org.junit.tests.experimental.rules.BlockJUnit4ClassRunnerOverrideTest; + import org.junit.tests.experimental.rules.ClassRulesTest; + import org.junit.tests.experimental.rules.ExpectedExceptionTest; + import org.junit.tests.experimental.rules.ExternalResourceRuleTest; + import org.junit.tests.experimental.rules.MethodRulesTest; + import org.junit.tests.experimental.rules.NameRulesTest; + import org.junit.tests.experimental.rules.RuleChainTest; + import org.junit.tests.experimental.rules.RuleMemberValidatorTest; + import org.junit.tests.experimental.rules.TempFolderRuleTest; + import org.junit.tests.experimental.rules.TemporaryFolderRuleAssuredDeletionTest; + import org.junit.tests.experimental.rules.TemporaryFolderUsageTest; + import org.junit.tests.experimental.rules.TestRuleTest; + import org.junit.tests.experimental.rules.TestWatcherTest; + import org.junit.tests.experimental.rules.TimeoutRuleTest; + import org.junit.tests.experimental.rules.VerifierRuleTest; + import org.junit.tests.experimental.theories.TestedOnSupplierTest; + import org.junit.tests.experimental.theories.internal.AllMembersSupplierTest; + import org.junit.tests.experimental.theories.internal.ParameterizedAssertionErrorTest; + import 
org.junit.tests.experimental.theories.internal.SpecificDataPointsSupplierTest; + import org.junit.tests.experimental.theories.runner.FailingDataPointMethods; + import org.junit.tests.experimental.theories.runner.TheoriesPerformanceTest; + import org.junit.tests.experimental.theories.runner.TypeMatchingBetweenMultiDataPointsMethod; + import org.junit.tests.experimental.theories.runner.WithAutoGeneratedDataPoints; + import org.junit.tests.experimental.theories.runner.WithDataPointMethod; + import org.junit.tests.experimental.theories.runner.WithNamedDataPoints; + import org.junit.tests.experimental.theories.runner.WithParameterSupplier; + import org.junit.tests.internal.runners.ErrorReportingRunnerTest; + import org.junit.tests.internal.runners.statements.FailOnTimeoutTest; + import org.junit.tests.junit3compatibility.AllTestsTest; + import org.junit.tests.junit3compatibility.ClassRequestTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityPrintingTest; + import org.junit.tests.junit3compatibility.ForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.InitializationErrorForwardCompatibilityTest; + import org.junit.tests.junit3compatibility.JUnit38ClassRunnerTest; + import org.junit.tests.junit3compatibility.OldTestClassAdaptingListenerTest; + import org.junit.tests.junit3compatibility.OldTests; + import org.junit.tests.junit3compatibility.SuiteMethodTest; + import org.junit.tests.listening.ListenerTest; + import org.junit.tests.listening.RunnerTest; + import org.junit.tests.listening.TestListenerTest; + import org.junit.tests.listening.TextListenerTest; + import org.junit.tests.listening.UserStopTest; + import org.junit.tests.manipulation.FilterTest; + import org.junit.tests.manipulation.FilterableTest; + import org.junit.tests.manipulation.OrderWithTest; + import org.junit.tests.manipulation.OrderableTest; + import org.junit.tests.manipulation.SingleMethodTest; + import org.junit.tests.manipulation.SortableTest; + import org.junit.tests.running.classes.BlockJUnit4ClassRunnerTest; + import org.junit.tests.running.classes.ClassLevelMethodsWithIgnoredTestsTest; + import org.junit.tests.running.classes.EnclosedTest; + import org.junit.tests.running.classes.IgnoreClassTest; + import org.junit.tests.running.classes.ParameterizedTestTest; + import org.junit.tests.running.classes.ParentRunnerFilteringTest; + import org.junit.tests.running.classes.ParentRunnerTest; + import org.junit.tests.running.classes.RunWithTest; + import org.junit.tests.running.classes.SuiteTest; + import org.junit.tests.running.classes.UseSuiteAsASuperclassTest; + import org.junit.tests.running.core.CommandLineTest; + import org.junit.tests.running.core.JUnitCoreReturnsCorrectExitCodeTest; + import org.junit.tests.running.core.SystemExitTest; + import org.junit.tests.running.methods.AnnotationTest; + import org.junit.tests.running.methods.ExpectedTest; + import org.junit.tests.running.methods.InheritedTestTest; + import org.junit.tests.running.methods.ParameterizedTestMethodTest; + import org.junit.tests.running.methods.TestMethodTest; + import org.junit.tests.running.methods.TimeoutTest; + import org.junit.tests.validation.BadlyFormedClassesTest; + import org.junit.tests.validation.FailedConstructionTest; + import org.junit.tests.validation.ValidationTest; + import org.junit.validator.PublicClassValidatorTest; +3:13,23c + import org.junit.samples.AllSamplesTests; + import org.junit.tests.assertion.AllAssertionTests; + import org.junit.tests.deprecated.AllDeprecatedTests; + import 
org.junit.tests.description.AllDescriptionTests; + import org.junit.tests.experimental.AllExperimentalTests; + import org.junit.tests.junit3compatibility.AllJUnit3CompatibilityTests; + import org.junit.tests.listening.AllListeningTests; + import org.junit.tests.manipulation.AllManipulationTests; + import org.junit.tests.running.AllRunningTests; + import org.junit.tests.validation.AllValidationTests; + import org.junit.validator.AllValidatorTests; +====3 +1:120,123c +2:122,125c + // These test files need to be cleaned. See + // https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407 + + @SuppressWarnings("deprecation") +3:24a +==== +1:126,165c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +2:128,169c + AssumptionTest.class, + ClassRequestTest.class, + ListenerTest.class, + FailedConstructionTest.class, + TestDescriptionTest.class, + TestDescriptionMethodNameTest.class, + SuiteDescriptionTest.class, + AllTestsTest.class, + AnnotationTest.class, + AssertionTest.class, + CommandLineTest.class, + ExpectedTest.class, + ComparisonFailureTest.class, + MultipleFailureExceptionTest.class, + ForwardCompatibilityTest.class, + OldTests.class, + ParameterizedTestTest.class, + RunWithTest.class, + RunnerTest.class, + SuiteTest.class, + TestListenerTest.class, + TestMethodTest.class, + TextListenerTest.class, + TimeoutTest.class, + EnclosedTest.class, + ParameterizedTestMethodTest.class, + InitializationErrorForwardCompatibilityTest.class, + SingleMethodTest.class, + ClassLevelMethodsWithIgnoredTestsTest.class, + ValidationTest.class, + UserStopTest.class, + SortableTest.class, + OrderableTest.class, + OrderWithTest.class, + JUnit38ClassRunnerTest.class, + SystemExitTest.class, + JUnitCoreReturnsCorrectExitCodeTest.class, + SuiteMethodTest.class, + BadlyFormedClassesTest.class, + IgnoreClassTest.class, + OldTestClassAdaptingListenerTest.class, + AnnotatedDescriptionTest.class, +3:27,41c + AllAssertionTests.class, + AllDeprecatedTests.class, + AllDescriptionTests.class, + AllExperimentalTests.class, + AllInternalTests.class, + AllJUnit3CompatibilityTests.class, + AllListeningTests.class, + AllManipulationTests.class, + AllRulesTests.class, + AllRunnersTests.class, + AllRunnerTests.class, + AllRunningTests.class, + AllSamplesTests.class, + AllValidationTests.class, + AllValidatorTests.class, +====3 +1:167,238c +2:171,242c + ExperimentalTests.class, + InheritedTestTest.class, + 
TestClassTest.class, + AllMembersSupplierTest.class, + SpecificDataPointsSupplierTest.class, + ParameterizedAssertionErrorTest.class, + WithDataPointMethod.class, + WithNamedDataPoints.class, + WithAutoGeneratedDataPoints.class, + MatcherTest.class, + ObjectContractTest.class, + TheoriesPerformanceTest.class, + JUnit4ClassRunnerTest.class, + UseSuiteAsASuperclassTest.class, + FilterableTest.class, + FilterTest.class, + MaxStarterTest.class, + JUnit38SortingTest.class, + MethodRulesTest.class, + TestRuleTest.class, + TimeoutRuleTest.class, + ParallelClassTest.class, + ParallelMethodTest.class, + ParentRunnerTest.class, + NameRulesTest.class, + ClassRulesTest.class, + ExpectedExceptionTest.class, + TempFolderRuleTest.class, + TemporaryFolderUsageTest.class, + ExternalResourceRuleTest.class, + VerifierRuleTest.class, + CategoryTest.class, + CategoriesAndParameterizedTest.class, + MultiCategoryTest.class, + JavadocTest.class, + ParentRunnerFilteringTest.class, + BlockJUnit4ClassRunnerOverrideTest.class, + RuleMemberValidatorTest.class, + RuleChainTest.class, + BlockJUnit4ClassRunnerTest.class, + CustomBlockJUnit4ClassRunnerTest.class, + MethodSorterTest.class, + TestedOnSupplierTest.class, + StacktracePrintingMatcherTest.class, + StopwatchTest.class, + RunNotifierTest.class, + ConcurrentRunNotifierTest.class, + SynchronizedRunListenerTest.class, + FilterOptionIntegrationTest.class, + JUnitCommandLineParseResultTest.class, + FilterFactoriesTest.class, + CategoryFilterFactoryTest.class, + FrameworkFieldTest.class, + FrameworkMethodTest.class, + FailOnTimeoutTest.class, + JUnitCoreTest.class, + TestWithParametersTest.class, + ParameterizedNamesTest.class, + PublicClassValidatorTest.class, + DisableOnDebugTest.class, + ThrowableCauseMatcherTest.class, + TestWatcherTest.class, + WithParameterSupplier.class, + FailingDataPointMethods.class, + TypeMatchingBetweenMultiDataPointsMethod.class, + TheoriesPerformanceTest.class, + MoneyTest.class, + CategoryValidatorTest.class, + ForwardCompatibilityPrintingTest.class, + DescriptionTest.class, + ErrorReportingRunnerTest.class, + TemporaryFolderRuleAssuredDeletionTest.class +3:43c + ObjectContractTest.class diff --git a/src/python/merge_conflict_analysis_diffs/1444/spork/diff_JUnit4TestAdapter.java.txt b/src/python/merge_conflict_analysis_diffs/1444/spork/diff_JUnit4TestAdapter.java.txt new file mode 100644 index 0000000000..45b245791d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1444/spork/diff_JUnit4TestAdapter.java.txt @@ -0,0 +1,37 @@ +====1 +1:11a +2:12,13c +3:12,13c + import org.junit.runner.manipulation.GeneralOrdering; + import org.junit.runner.manipulation.InvalidOrderingException; +====1 +1:13c + import org.junit.runner.manipulation.Sortable; +2:15c +3:15c + import org.junit.runner.manipulation.Orderable; +==== +1:16c + public class JUnit4TestAdapter implements Test, Filterable, Sortable, Describable { +2:18c + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +3:18,28c + /** + * The JUnit4TestAdapter enables running JUnit-4-style tests using a JUnit-3-style test runner. + * + *

To use it, add the following to a test class: + * <pre>
+        public static Test suite() {
+          return new JUnit4TestAdapter(YourJUnit4TestClass.class);
+        }
+  </pre>
    + */ + public class JUnit4TestAdapter implements Test, Filterable, Orderable, Describable { +====1 +1:85a +2:88,91c +3:98,101c + + public void order(GeneralOrdering ordering) throws InvalidOrderingException { + ordering.apply(fRunner); + } diff --git a/src/python/merge_conflict_analysis_diffs/1642/git_hires_merge/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/git_hires_merge/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..5956031d13 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/git_hires_merge/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,200 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====3 +1:47c +2:44c + * TODO: Cache-Control and ETAG +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====3 +1:62a +2:58a +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====3 +1:63a +2:59a +3:64c + * @throws IOException in case of IO error. +====3 +1:66a +2:62a +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:64,65c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +====3 +1:75c +2:67c + +3:76a +==== +1:89,110c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:81c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +3:90,93c + + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +==== +1:112,114c + + IOUtils.copy(stream, wrappedOutputStream); + +2:83c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +3:95,97c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:85a +3:99a +====1 +1:120a +2:89c +3:103c + +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:103a +3:117a +====1 +1:153,163c + try { + 
ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:116a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:118,119c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:121,123c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +====1 +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:128a +3:143a +====3 +1:224a +2:152a +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..67e5c45589 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,323 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====1 +1:47c + * TODO: Cache-Control and ETAG +2:44c +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====1 +1:62a +2:59,62c +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====1 +1:63a +2:64c +3:64c + * @throws IOException in case of IO error. 
+====1 +1:66a +2:68,71c +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:73,78c + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +==== +1:75c + +2:80,89c + <<<<<<< HEAD + + ||||||| 114d245e + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:76a +====1 +1:89,96c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } +2:102a +3:89a +====1 +1:98,110c + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:104,106c +3:91,93c + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +====1 +1:111a +2:108c +3:95c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +====1 +1:113,114c + IOUtils.copy(stream, wrappedOutputStream); + +2:110c +3:97c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:112a +3:99a +====1 +1:120a +2:116c +3:103c + +====2 +1:124a +3:107a +2:121,171c + <<<<<<< HEAD + ||||||| 114d245e + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + + IOUtils.copy(stream, wrappedOutputStream); + + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + + return true; + } + } + } + return false; + } + + ======= + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + if (MimeType.shouldGuess()) { + 
httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromPathInfo(httpRequest.getPathInfo())); + } + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(stream, wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:181a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:194a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:196,197c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:199,206c + + <<<<<<< HEAD + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + ||||||| 114d245e + ======= + StaticFilesFolder.localConfiguredTo(folder); + >>>>>>> TEMP_RIGHT_BRANCH +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +==== +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:212,258c + <<<<<<< HEAD + ||||||| 114d245e + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + + ======= + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + 
jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } + + LOG.error("Static file configuration failed."); + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:143a +====1 +1:224a +2:283,284c +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_adjacent/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_adjacent/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..5956031d13 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_adjacent/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,200 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====3 +1:47c +2:44c + * TODO: Cache-Control and ETAG +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====3 +1:62a +2:58a +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====3 +1:63a +2:59a +3:64c + * @throws IOException in case of IO error. +====3 +1:66a +2:62a +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:64,65c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +====3 +1:75c +2:67c + +3:76a +==== +1:89,110c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:81c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +3:90,93c + + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +==== +1:112,114c + + IOUtils.copy(stream, wrappedOutputStream); + +2:83c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +3:95,97c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:85a +3:99a +====1 +1:120a +2:89c +3:103c + +====1 +1:135,139c + if (jarResourceHandlers != null) { + 
jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:103a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:116a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:118,119c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:121,123c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +====1 +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:128a +3:143a +====3 +1:224a +2:152a +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_ignorespace/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_ignorespace/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..cbc2f9492a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_ignorespace/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,331 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====1 +1:47c + * TODO: Cache-Control and ETAG +2:44c +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====1 +1:62a +2:59,62c +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====1 +1:63a +2:64c +3:64c + * @throws IOException in case of IO error. 
+==== +1:67,73c + + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:68,85c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + <<<<<<< HEAD + ||||||| 114d245e + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + + ======= + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +3:68,75c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +==== +1:75c + +2:87c + >>>>>>> TEMP_RIGHT_BRANCH +3:76a +====1 +1:89,96c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } +2:100a +3:89a +====1 +1:98,110c + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:102,104c +3:91,93c + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +====1 +1:111a +2:106c +3:95c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +====1 +1:113,114c + IOUtils.copy(stream, wrappedOutputStream); + +2:108c +3:97c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:110a +3:99a +====1 +1:120a +2:114c +3:103c + +====2 +1:124a +3:107a +2:119,169c + <<<<<<< HEAD + ||||||| 114d245e + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + + IOUtils.copy(stream, wrappedOutputStream); + + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + + return true; + } + } + } + return false; + } + + ======= + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + if 
(MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromPathInfo(httpRequest.getPathInfo())); + } + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(stream, wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:179a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:192a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:194,195c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:197,211c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + <<<<<<< HEAD + ||||||| 114d245e + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); + } + ======= + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); + } + + StaticFilesFolder.localConfiguredTo(folder); + >>>>>>> TEMP_RIGHT_BRANCH +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +==== +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:217,263c + <<<<<<< HEAD + ||||||| 114d245e + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + + ======= + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = 
StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } + + LOG.error("Static file configuration failed."); + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:143a +====1 +1:224a +2:288,289c +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_imports/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_imports/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..5956031d13 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_imports/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,200 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====3 +1:47c +2:44c + * TODO: Cache-Control and ETAG +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====3 +1:62a +2:58a +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====3 +1:63a +2:59a +3:64c + * @throws IOException in case of IO error. +====3 +1:66a +2:62a +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:64,65c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +====3 +1:75c +2:67c + +3:76a +==== +1:89,110c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:81c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +3:90,93c + + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +==== +1:112,114c + + IOUtils.copy(stream, wrappedOutputStream); + +2:83c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +3:95,97c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + 
+ IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:85a +3:99a +====1 +1:120a +2:89c +3:103c + +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:103a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:116a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:118,119c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:121,123c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +====1 +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:128a +3:143a +====3 +1:224a +2:152a +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_imports_ignorespace/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_imports_ignorespace/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..5956031d13 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_ort_imports_ignorespace/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,200 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====3 +1:47c +2:44c + * TODO: Cache-Control and ETAG +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====3 +1:62a +2:58a +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====3 +1:63a +2:59a +3:64c + * @throws IOException in case of IO error. 
+====3 +1:66a +2:62a +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:64,65c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +====3 +1:75c +2:67c + +3:76a +==== +1:89,110c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:81c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +3:90,93c + + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +==== +1:112,114c + + IOUtils.copy(stream, wrappedOutputStream); + +2:83c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +3:95,97c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:85a +3:99a +====1 +1:120a +2:89c +3:103c + +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:103a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:116a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:118,119c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:121,123c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +====1 +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + 
InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:128a +3:143a +====3 +1:224a +2:152a +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_histogram/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_histogram/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..67e5c45589 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_histogram/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,323 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====1 +1:47c + * TODO: Cache-Control and ETAG +2:44c +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====1 +1:62a +2:59,62c +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====1 +1:63a +2:64c +3:64c + * @throws IOException in case of IO error. +====1 +1:66a +2:68,71c +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:73,78c + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +==== +1:75c + +2:80,89c + <<<<<<< HEAD + + ||||||| 114d245e + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:76a +====1 +1:89,96c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } +2:102a +3:89a +====1 +1:98,110c + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:104,106c +3:91,93c 
+ if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +====1 +1:111a +2:108c +3:95c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +====1 +1:113,114c + IOUtils.copy(stream, wrappedOutputStream); + +2:110c +3:97c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:112a +3:99a +====1 +1:120a +2:116c +3:103c + +====2 +1:124a +3:107a +2:121,171c + <<<<<<< HEAD + ||||||| 114d245e + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + + IOUtils.copy(stream, wrappedOutputStream); + + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + + return true; + } + } + } + return false; + } + + ======= + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromPathInfo(httpRequest.getPathInfo())); + } + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(stream, wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:181a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:194a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:196,197c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:199,206c + + <<<<<<< HEAD + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + ||||||| 114d245e + ======= + StaticFilesFolder.localConfiguredTo(folder); + >>>>>>> TEMP_RIGHT_BRANCH +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +==== +1:179,200c + private boolean configureJarCase(String folder, 
ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:212,258c + <<<<<<< HEAD + ||||||| 114d245e + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + + ======= + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } + + LOG.error("Static file configuration failed."); + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:143a +====1 +1:224a +2:283,284c +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_ignorespace/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_ignorespace/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..2cda2c27d6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_ignorespace/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,327 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====1 +1:47c + * TODO: Cache-Control and ETAG +2:44c +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====1 +1:62a +2:59,62c +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====1 +1:63a +2:64c +3:64c + * @throws IOException in case of IO error. 
+==== +1:67,73c + + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:68,85c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + <<<<<<< HEAD + ||||||| 114d245e + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + + ======= + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +3:68,75c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +==== +1:75c + +2:87c + >>>>>>> TEMP_RIGHT_BRANCH +3:76a +====1 +1:89,96c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } +2:100a +3:89a +====1 +1:98,110c + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:102,104c +3:91,93c + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +====1 +1:111a +2:106c +3:95c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +====1 +1:113,114c + IOUtils.copy(stream, wrappedOutputStream); + +2:108c +3:97c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:110a +3:99a +====1 +1:120a +2:114c +3:103c + +====2 +1:124a +3:107a +2:119,169c + <<<<<<< HEAD + ||||||| 114d245e + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + + IOUtils.copy(stream, wrappedOutputStream); + + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + + return true; + } + } + } + return false; + } + + ======= + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + if 
(MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromPathInfo(httpRequest.getPathInfo())); + } + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(stream, wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:179a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:192a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:194,195c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:197,227c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + <<<<<<< HEAD + ||||||| 114d245e + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); + } + staticResourcesSet = true; + } + + } + + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + ======= + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); + } + + StaticFilesFolder.localConfiguredTo(folder); +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +==== +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:233,259c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar 
file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + >>>>>>> TEMP_RIGHT_BRANCH + staticResourcesSet = true; + <<<<<<< HEAD + ||||||| 114d245e + return true; + } else { + LOG.error("Static file configuration failed."); + ======= + return true; + >>>>>>> TEMP_RIGHT_BRANCH + } + + LOG.error("Static file configuration failed."); + } + +3:143a +====1 +1:224a +2:284,285c +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_minimal/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_minimal/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..4a4c55e2dd --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_minimal/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,329 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====1 +1:47c + * TODO: Cache-Control and ETAG +2:44c +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====1 +1:62a +2:59,62c +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====1 +1:63a +2:64c +3:64c + * @throws IOException in case of IO error. +====1 +1:66a +2:68,71c +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:73,93c + <<<<<<< HEAD + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + ||||||| 114d245e + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + + ======= + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +==== +1:75c + +2:95c + >>>>>>> TEMP_RIGHT_BRANCH +3:76a +====1 +1:89,96c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } +2:108a +3:89a +====1 +1:98,110c + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream 
wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:110,112c +3:91,93c + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +====1 +1:111a +2:114c +3:95c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +====1 +1:113,114c + IOUtils.copy(stream, wrappedOutputStream); + +2:116c +3:97c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:118a +3:99a +====1 +1:120a +2:122c +3:103c + +====2 +1:124a +3:107a +2:127,177c + <<<<<<< HEAD + ||||||| 114d245e + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + + IOUtils.copy(stream, wrappedOutputStream); + + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + + return true; + } + } + } + return false; + } + + ======= + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromPathInfo(httpRequest.getPathInfo())); + } + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(stream, wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:187a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:200a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:202,203c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:205,212c + + <<<<<<< HEAD + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + ||||||| 114d245e + ======= + StaticFilesFolder.localConfiguredTo(folder); + >>>>>>> TEMP_RIGHT_BRANCH +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + 
StaticFilesFolder.localConfiguredTo(folder); +==== +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:218,264c + <<<<<<< HEAD + ||||||| 114d245e + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + + ======= + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } + + LOG.error("Static file configuration failed."); + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:143a +====1 +1:224a +2:289,290c +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_myers/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_myers/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..4a4c55e2dd --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_myers/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,329 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====1 +1:47c + * TODO: Cache-Control and ETAG +2:44c +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====1 +1:62a +2:59,62c +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====1 +1:63a +2:64c +3:64c + * @throws IOException in case of IO error. 
+====1 +1:66a +2:68,71c +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:73,93c + <<<<<<< HEAD + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + ||||||| 114d245e + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + + ======= + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +==== +1:75c + +2:95c + >>>>>>> TEMP_RIGHT_BRANCH +3:76a +====1 +1:89,96c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } +2:108a +3:89a +====1 +1:98,110c + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:110,112c +3:91,93c + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +====1 +1:111a +2:114c +3:95c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +====1 +1:113,114c + IOUtils.copy(stream, wrappedOutputStream); + +2:116c +3:97c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:118a +3:99a +====1 +1:120a +2:122c +3:103c + +====2 +1:124a +3:107a +2:127,177c + <<<<<<< HEAD + ||||||| 114d245e + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + + IOUtils.copy(stream, wrappedOutputStream); + + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + + return true; + } + } + } + return false; + } + + ======= + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : 
jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromPathInfo(httpRequest.getPathInfo())); + } + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(stream, wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:187a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:200a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:202,203c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:205,212c + + <<<<<<< HEAD + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + ||||||| 114d245e + ======= + StaticFilesFolder.localConfiguredTo(folder); + >>>>>>> TEMP_RIGHT_BRANCH +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +==== +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:218,264c + <<<<<<< HEAD + ||||||| 114d245e + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + + ======= + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != 
null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } + + LOG.error("Static file configuration failed."); + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:143a +====1 +1:224a +2:289,290c +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_patience/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_patience/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..67e5c45589 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/gitmerge_recursive_patience/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,323 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====1 +1:47c + * TODO: Cache-Control and ETAG +2:44c +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====1 +1:62a +2:59,62c +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====1 +1:63a +2:64c +3:64c + * @throws IOException in case of IO error. +====1 +1:66a +2:68,71c +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:73,78c + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +==== +1:75c + +2:80,89c + <<<<<<< HEAD + + ||||||| 114d245e + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:76a +====1 +1:89,96c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } +2:102a +3:89a +====1 +1:98,110c + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:104,106c +3:91,93c + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, 
MimeType.fromResource(resource)); + } +====1 +1:111a +2:108c +3:95c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +====1 +1:113,114c + IOUtils.copy(stream, wrappedOutputStream); + +2:110c +3:97c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:112a +3:99a +====1 +1:120a +2:116c +3:103c + +====2 +1:124a +3:107a +2:121,171c + <<<<<<< HEAD + ||||||| 114d245e + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + + IOUtils.copy(stream, wrappedOutputStream); + + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + + return true; + } + } + } + return false; + } + + ======= + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromPathInfo(httpRequest.getPathInfo())); + } + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(stream, wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:181a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:194a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:196,197c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:199,206c + + <<<<<<< HEAD + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + ||||||| 114d245e + ======= + StaticFilesFolder.localConfiguredTo(folder); + >>>>>>> TEMP_RIGHT_BRANCH +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +==== +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if 
(resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:212,258c + <<<<<<< HEAD + ||||||| 114d245e + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + + ======= + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } + + LOG.error("Static file configuration failed."); + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:143a +====1 +1:224a +2:283,284c +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/intellimerge/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/intellimerge/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..6bf3f7b8e7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/intellimerge/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,324 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====1 +1:47c + * TODO: Cache-Control and ETAG +2:44c +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====1 +1:62a +2:59,62c +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====1 +1:63a +2:64c +3:64c + * @throws IOException in case of IO error. 
+====1 +1:66a +2:68,71c +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:73,78c + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +==== +1:75c + +2:80,89c + <<<<<<< HEAD + + ||||||| 114d245e + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; + } + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:76a +====1 +1:89,96c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } +2:102a +3:89a +====1 +1:98,110c + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:104,106c +3:91,93c + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +====1 +1:111a +2:108c +3:95c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +====1 +1:113,114c + IOUtils.copy(stream, wrappedOutputStream); + +2:110c +3:97c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:112a +3:99a +====1 +1:120a +2:116c +3:103c + +====2 +1:124a +3:107a +2:121,171c + <<<<<<< HEAD + ||||||| 114d245e + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + + IOUtils.copy(stream, wrappedOutputStream); + + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + + return true; + } + } + } + return false; + } + + ======= + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + if (MimeType.shouldGuess()) { + 
httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromPathInfo(httpRequest.getPathInfo())); + } + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(stream, wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:181a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:194a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:196,197c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:199,207c + <<<<<<< HEAD + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + ||||||| 114d245e + ======= + + StaticFilesFolder.localConfiguredTo(folder); + >>>>>>> TEMP_RIGHT_BRANCH +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +==== +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:213,259c + <<<<<<< HEAD + ||||||| 114d245e + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + + ======= + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + 
jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } + + LOG.error("Static file configuration failed."); + } + return false; + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:143a +====1 +1:224a +2:284,285c +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/1642/spork/diff_ClassPathResourceHandler.java.txt b/src/python/merge_conflict_analysis_diffs/1642/spork/diff_ClassPathResourceHandler.java.txt new file mode 100644 index 0000000000..57b70ab09c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/spork/diff_ClassPathResourceHandler.java.txt @@ -0,0 +1,31 @@ +====3 +1:24a +2:24a +3:25c + import spark.staticfiles.DirectoryTraversal; +====1 +1:72c + if (resource.exists() && resource.getFile().isDirectory()) { +2:72c +3:73c + if (resource.exists() && path.endsWith("/")) { +====1 +1:76c + // No welcome file configured, serve nothing since it's a directory +2:76c +3:77c + // No welcome file configured, serve nothing since it's a directory +====3 +1:81c +2:81c + return (resource != null && resource.exists()) ? resource : null; +3:82,90c + if (resource != null && resource.exists()) { + DirectoryTraversal.protectAgainstInClassPath(resource.getPath()); + return resource; + } else { + return null; + } + + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + throw directoryTraversalDetection; diff --git a/src/python/merge_conflict_analysis_diffs/1642/spork/diff_StaticFilesConfiguration.java.txt b/src/python/merge_conflict_analysis_diffs/1642/spork/diff_StaticFilesConfiguration.java.txt new file mode 100644 index 0000000000..5956031d13 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1642/spork/diff_StaticFilesConfiguration.java.txt @@ -0,0 +1,200 @@ +====1 +1:20c + import java.io.InputStream; +2:19a +3:19a +====1 +1:36c + import spark.resource.ClassPathResource; +2:34a +3:34a +====1 +1:40c + import spark.resource.JarResourceHandler; +2:37a +3:37a +====3 +1:47c +2:44c + * TODO: Cache-Control and ETAG +3:44c + * TODO: ETAG ? +====1 +1:53c + private List jarResourceHandlers = null; +2:49a +3:49a +====3 +1:62a +2:58a +3:59,62c + * Attempt consuming using either static resource handlers or jar resource handlers + * + * @param httpRequest The HTTP servlet request. + * @param httpResponse The HTTP servlet response. +====3 +1:63a +2:59a +3:64c + * @throws IOException in case of IO error. 
+====3 +1:66a +2:62a +3:68,71c + try { + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } +==== +1:68,73c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; + } + + if (consumeWithJarResourceHandler(httpRequest, httpResponse)) { + return true; +2:64,65c + if (consumeWithFileResourceHandlers(httpRequest, httpResponse)) { + return true; +3:73,75c + } catch (DirectoryTraversal.DirectoryTraversalDetection directoryTraversalDetection) { + LOG.warn(directoryTraversalDetection.getMessage() + " directory traversal detection for path: " + + httpRequest.getPathInfo()); +====3 +1:75c +2:67c + +3:76a +==== +1:89,110c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + customHeaders.forEach(httpResponse::setHeader); //add all user-defined headers to response + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); + wrappedOutputStream.flush(); + wrappedOutputStream.close(); + return true; + } + } + + } + return false; + } + + private boolean consumeWithJarResourceHandler(HttpServletRequest httpRequest, + HttpServletResponse httpResponse) throws IOException { + if (jarResourceHandlers != null) { + + for (JarResourceHandler jarResourceHandler : jarResourceHandlers) { + InputStream stream = jarResourceHandler.getResource(httpRequest); + + if (stream != null) { + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +2:81c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); +3:90,93c + + if (MimeType.shouldGuess()) { + httpResponse.setHeader(MimeType.CONTENT_TYPE, MimeType.fromResource(resource)); + } +==== +1:112,114c + + IOUtils.copy(stream, wrappedOutputStream); + +2:83c + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +3:95,97c + OutputStream wrappedOutputStream = GzipUtils.checkAndWrap(httpRequest, httpResponse, false); + + IOUtils.copy(resource.getInputStream(), wrappedOutputStream); +====1 +1:117c + +2:85a +3:99a +====1 +1:120a +2:89c +3:103c + +====1 +1:135,139c + if (jarResourceHandlers != null) { + jarResourceHandlers.clear(); + jarResourceHandlers = null; + } + +2:103a +3:117a +====1 +1:153,163c + try { + ClassPathResource resource = new ClassPathResource(folder); + + if (configureJarCase(folder, resource)) { + return; + } + + if (!resource.getFile().isDirectory()) { + LOG.error("Static resource location must be a folder"); + return; + } +2:116a +3:130a +====1 +1:165,172c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); + } + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + } catch (IOException e) { + LOG.error("Error when creating StaticResourceHandler", e); +2:118,119c +3:132,133c + if (staticResourceHandlers == null) { + staticResourceHandlers = new ArrayList<>(); +==== +1:173a +2:121,123c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); +3:135,138c + + staticResourceHandlers.add(new ClassPathResourceHandler(folder, "index.html")); + LOG.info("StaticResourceHandler configured with folder = " + folder); + StaticFilesFolder.localConfiguredTo(folder); +====1 +1:179,200c + private boolean configureJarCase(String folder, ClassPathResource resource) throws IOException { + if (resource.getURL().getProtocol().equals("jar")) { + + 
InputStream stream = StaticFilesConfiguration.class.getResourceAsStream(folder); + + if (stream != null) { + if (jarResourceHandlers == null) { + jarResourceHandlers = new ArrayList<>(); + } + + // Add jar file resource handler + jarResourceHandlers.add(new JarResourceHandler(folder, "index.html")); + staticResourcesSet = true; + return true; + } else { + LOG.error("Static file configuration failed."); + } + + } + return false; + } + +2:128a +3:143a +====3 +1:224a +2:152a +3:168,169c + + StaticFilesFolder.externalConfiguredTo(folder); diff --git a/src/python/merge_conflict_analysis_diffs/184/git_hires_merge/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/git_hires_merge/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..9187319c0a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/git_hires_merge/diff_CollectionUtils.java.txt @@ -0,0 +1,884 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====3 +1:75c +2:76c + public CardinalityHelper(final Iterable a, final Iterable b) { +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:142c +2:143c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:187c +2:188c + * CollectionUtils should not normally be instantiated. +3:188c + * {@code CollectionUtils} should not normally be instantiated. +====3 +1:205c +2:206c + * Returns an immutable empty collection if the argument is null, +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====3 +1:209,210c +2:210,211c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====3 +1:357c +2:367c + * Returns true iff all elements of {@code coll2} are also contained +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====3 +1:361c +2:371c + * In other words, this method returns true iff the +3:371c + * In other words, this method returns {@code true} iff the +====3 +1:376c +2:386c + * @return true iff the intersection of the collections has the same cardinality +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====3 +1:409c +2:421c + * Returns true iff at least one element is in both collections. 
+3:421c + * Returns {@code true} iff at least one element is in both collections. +====3 +1:411c +2:423c + * In other words, this method returns true iff the +3:423c + * In other words, this method returns {@code true} iff the +====3 +1:415c +2:427c + * @param the type of object to lookup in coll1. +3:427c + * @param the type of object to lookup in {@code coll1}. +====3 +1:418c +2:430c + * @return true iff the intersection of the collections is non-empty +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====3 +1:440c +2:454c + * Returns true iff at least one element is in both collections. +3:454c + * Returns {@code true} iff at least one element is in both collections. +====3 +1:442c +2:456c + * In other words, this method returns true iff the +3:456c + * In other words, this method returns {@code true} iff the +====3 +1:448c +2:462c + * @return true iff the intersection of the collections is non-empty +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====3 +1:503c +2:520c + * @return true iff a is a sub-collection of b +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:528c +2:547c + *
  • a.size() and b.size() represent the +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====3 +1:530c +2:549c + *
  • a.size() < Integer.MAXVALUE
  • +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====3 +1:535c +2:554c + * @return true iff a is a proper sub-collection of b +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:554c +2:575c + * @return true iff the collections contain the same elements with the same cardinalities. +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====3 +1:561c +2:584c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====3 +1:564,565c +2:587,588c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====3 +1:591c +2:614c + * @return true iff the collections contain the same elements with the same cardinalities. +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,602c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { +2:621,624c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { +3:621,625c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); + + if (a.size() != b.size()) { +====3 +1:626c +2:648c + public EquatorWrapper(final Equator equator, final O object) { +3:649c + EquatorWrapper(final Equator equator, final O object) { +====3 +1:655c +2:677c + * @param coll the {@link Iterable} to search +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:685,687c + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====3 +1:799c +2:819c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====3 +1:856c +2:876c + * A null collection or predicate matches no elements. +3:876c + * A {@code null} collection or predicate matches no elements. +====3 +1:874c +2:894c + * A null collection or predicate returns false. 
+3:894c + * A {@code null} collection or predicate returns false. +====3 +1:893c +2:913c + * A null predicate returns false. +3:913c + * A {@code null} predicate returns false. +====3 +1:896c +2:916c + * A null or empty collection returns true. +3:916c + * A {@code null} or empty collection returns true. +====3 +1:916c +2:936c + * A null predicate matches no elements. +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:942a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:946,950c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:965,966c +2:987,988c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====3 +1:969,970c +2:991,992c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====3 +1:1010c +2:1032c + * If the input predicate is null, the result is an empty +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1039a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1043,1047c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:1031,1032c +2:1055,1056c + * If the input predicate is null, no elements are added to + * outputCollection. +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1096,1100c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? 
new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1192c + Objects.requireNonNull(collection, "The collection must not be null."); +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1208,1209c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1226,1227c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1245,1246c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1264,1265c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====3 +1:1241,1242c +2:1274,1275c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====3 +1:1244,1245c +2:1277,1278c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1291c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====3 +1:1273,1274c +2:1307,1308c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1322c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====3 +1:1292,1294c +2:1327,1329c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====3 +1:1300,1301c +2:1335,1336c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====3 +1:1304,1305c +2:1339,1340c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====3 +1:1307c +2:1342c + *
  • Collection -- the value returned is the index-th object +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====3 +1:1310c +2:1345c + * index-th object in the Iterator/Enumeration, if there +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====3 +1:1312c +2:1347c + * index (or to the end, if index exceeds the +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====3 +1:1327c +2:1362c + if (object instanceof Map) { +3:1362c + if (object instanceof Map) { +====3 +1:1354,1355c +2:1389,1390c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1393c +3:1393c + * @param the value type in the {@link Map} +====1 +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1399,1400c +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====3 +1:1392c +2:1428c + if (object instanceof Map) { +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1536c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====3 +1:1526c +2:1563c + * @param coll the collection to check +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1567,1570c + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====3 +1:1539c +2:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1559c +2:1594c + * @param coll the collection to check +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1598,1601c + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====3 +1:1572c +2:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1649,1651c +2:1682,1684c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for 
the merge. +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1666c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1691,1695c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); +3:1691,1696c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { + + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====3 +1:1669,1670c +2:1698,1699c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? + Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====3 +1:1672c +2:1701c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1743c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====3 +1:1724,1727c +2:1754,1757c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====3 +1:1729c +2:1759c + * the collection c and thus cannot call c.retainAll(retain);. +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====3 +1:1731,1732c +2:1761,1762c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====3 +1:1734c +2:1764c + * retain that provides a fast (e.g. O(1)) implementation of +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====3 +1:1741,1742c +2:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. 
+====1 +1:1746a +2:1777,1778c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====3 +1:1752,1755c +2:1784,1787c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====3 +1:1757,1758c +2:1789,1790c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====3 +1:1762c +2:1794c + * in collection and retain. Hence this method is +3:1795c + * in {@code collection} and {@code retain}. Hence this method is +====3 +1:1771,1772c +2:1803,1804c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1811,1813c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1834,1836c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1841c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1863c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====3 +1:1845,1846c +2:1875,1876c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====3 +1:1861,1865c +2:1891,1895c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. 
The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====3 +1:1867c +2:1897c + * the collection c and thus cannot call collection.removeAll(remove);. +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====3 +1:1869,1870c +2:1899,1900c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====3 +1:1872c +2:1902c + * remove that provides a fast (e.g. O(1)) implementation of +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====3 +1:1878,1880c +2:1908,1910c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====3 +1:1886c +2:1916c + } +3:1917c + } +====3 +1:1889c +2:1919c + * Removes all elements in remove from collection. +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====3 +1:1891,1894c +2:1921,1924c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====3 +1:1896,1897c +2:1926,1927c + * the collection c and thus cannot call + * collection.removeAll(remove). +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====3 +1:1901c +2:1931c + * in collection and remove. Hence this method is +3:1932c + * in {@code collection} and {@code remove}. 
Hence this method is +====3 +1:1910,1911c +2:1940,1941c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:1948,1950c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:1993c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2011c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2032,2033c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2057,2058c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2073c + Objects.requireNonNull(collection, "The collection must not be null."); +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/git_hires_merge/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/git_hires_merge/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..27920f0ad8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/git_hires_merge/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,652 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + 
public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? 
Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====3 +1:655c +2:835c + assertNull(CollectionUtils.find(null,testPredicate)); +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====3 +1:1279c +2:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); + } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====3 +1:1477c +2:1670c + List list = new ArrayList<>(); +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = 
null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1689c + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====3 +1:1501c +2:1694c + Collection list = new ArrayList<>(); +3:1694c + final Collection list = new ArrayList<>(); +====1 +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1696c +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====3 +1:1508c +2:1701c + Collection list = new ArrayList<>(); +3:1701c + final Collection list = new ArrayList<>(); +====1 +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1703c +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====3 +1:1515c +2:1708c + Collection list = new ArrayList<>(); +3:1708c + final Collection list = new ArrayList<>(); +====1 +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1711c +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====3 +1:1523c +2:1716c + Collection list = new ArrayList<>(); +3:1716c + final Collection list = new ArrayList<>(); +====1 +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1718c +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====3 +1:1530c +2:1723c + List list = new ArrayList<>(); +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1752c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1754,1755c + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1760,1761c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1766,1767c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====3 +1:1579c +2:1772c + Collection list = new ArrayList<>(); +3:1772c + final Collection list = new ArrayList<>(); +====1 +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1774c +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1801c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1803,1814c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final NullPointerException ex) { + // expected + } + try { 
+ CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1823,1834c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====3 +1:1740c +2:1941c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1746c +2:1981c + CollectionUtils.get((Object)collectionA, -3); +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====3 +1:1749c +2:1984c + @Test(expected=IndexOutOfBoundsException.class) +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1751c +2:1986c + CollectionUtils.get((Object)collectionA.iterator(), 30); +3:1986c + CollectionUtils.get((Object) collectionA.iterator(), 30); +====3 +1:1754c +2:1989c + @Test(expected=IllegalArgumentException.class) +3:1989c + @Test(expected = IllegalArgumentException.class) +====3 +1:1756c +2:1991c + 
CollectionUtils.get((Object)null, 0); +3:1991c + CollectionUtils.get((Object) null, 0); +====3 +1:1761,1762c +2:1996,1997c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====3 +1:1764c +2:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2031,2035c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2069,2073c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..a61e64f7d0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort/diff_CollectionUtils.java.txt @@ -0,0 +1,985 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====1 +1:75c + public CardinalityHelper(final Iterable a, final Iterable b) { +2:76c +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:142c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +2:143c +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:187c + * CollectionUtils should not normally be instantiated. +2:188c +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====1 +1:205c + * Returns an immutable empty collection if the argument is null, +2:206c +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====1 +1:209,210c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +2:210,211c +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====1 +1:357c + * Returns true iff all elements of {@code coll2} are also contained +2:367c +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====1 +1:361c + * In other words, this method returns true iff the +2:371c +3:371c + * In other words, this method returns {@code true} iff the +====1 +1:376c + * @return true iff the intersection of the collections has the same cardinality +2:386c +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:409c + * Returns true iff at least one element is in both collections. +2:421c +3:421c + * Returns {@code true} iff at least one element is in both collections. +====1 +1:411c + * In other words, this method returns true iff the +2:423c +3:423c + * In other words, this method returns {@code true} iff the +====1 +1:415c + * @param the type of object to lookup in coll1. +2:427c +3:427c + * @param the type of object to lookup in {@code coll1}. +====1 +1:418c + * @return true iff the intersection of the collections is non-empty +2:430c +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====1 +1:440c + * Returns true iff at least one element is in both collections. +2:454c +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====1 +1:442c + * In other words, this method returns true iff the +2:456c +3:456c + * In other words, this method returns {@code true} iff the +====1 +1:448c + * @return true iff the intersection of the collections is non-empty +2:462c +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====1 +1:503c + * @return true iff a is a sub-collection of b +2:520c +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:528c + *
  • a.size() and b.size() represent the +2:547c +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====1 +1:530c + *
  • a.size() < Integer.MAXVALUE
  • +2:549c +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====1 +1:535c + * @return true iff a is a proper sub-collection of b +2:554c +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:554c + * @return true iff the collections contain the same elements with the same cardinalities. +2:575c +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,586c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if(a.size() != b.size()) { + ======= + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====1 +1:561c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +2:590c +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====1 +1:564,565c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +2:593,594c +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====1 +1:591c + * @return true iff the collections contain the same elements with the same cardinalities. +2:620c +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,600c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } +2:627,639c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { + ======= + Objects.requireNonNull(equator, "equator"); +3:621,623c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); +==== +1:602c + if(a.size() != b.size()) { +2:641,642c + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:625c + if (a.size() != b.size()) { +====1 +1:626c + public EquatorWrapper(final Equator equator, final O object) { +2:666c +3:649c + EquatorWrapper(final Equator equator, final O object) { +====1 +1:655c + * @param coll the {@link Iterable} to search +2:695c +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:703,716c + <<<<<<< HEAD + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); + ||||||| 4551c3df1 + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + 
return IterableUtils.frequency(coll, obj); + ======= + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); + >>>>>>> TEMP_RIGHT_BRANCH +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====1 +1:799c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +2:848c +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====1 +1:856c + * A null collection or predicate matches no elements. +2:905c +3:876c + * A {@code null} collection or predicate matches no elements. +====1 +1:874c + * A null collection or predicate returns false. +2:923c +3:894c + * A {@code null} collection or predicate returns false. +====1 +1:893c + * A null predicate returns false. +2:942c +3:913c + * A {@code null} predicate returns false. +====1 +1:896c + * A null or empty collection returns true. +2:945c +3:916c + * A {@code null} or empty collection returns true. +====1 +1:916c + * A null predicate matches no elements. +2:965c +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:971a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:975,979c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:965,966c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +2:1016,1017c +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====1 +1:969,970c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +2:1020,1021c +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====1 +1:1010c + * If the input predicate is null, the result is an empty +2:1061c +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1068a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1072,1076c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:1031,1032c + * If the input predicate is null, no elements are added to + * outputCollection. +2:1084,1085c +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? 
+ new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1125,1129c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1221,1229c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1245,1246c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1263,1264c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1282,1283c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1301,1302c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====1 +1:1241,1242c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1311,1312c +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1244,1245c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +2:1314,1315c +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1328c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====1 +1:1273,1274c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1344,1345c +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1359c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====1 +1:1292,1294c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +2:1364,1366c +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====1 +1:1300,1301c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +2:1372,1373c +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====1 +1:1304,1305c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +2:1376,1377c +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====1 +1:1307c + *
  • Collection -- the value returned is the index-th object +2:1379c +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====1 +1:1310c + * index-th object in the Iterator/Enumeration, if there +2:1382c +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====1 +1:1312c + * index (or to the end, if index exceeds the +2:1384c +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====1 +1:1327c + if (object instanceof Map) { +2:1399c +3:1362c + if (object instanceof Map) { +====1 +1:1354,1355c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +2:1426,1427c +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1430c +3:1393c + * @param the value type in the {@link Map} +==== +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1436,1442c + public static Map.Entry get(final Map map, final int index) { + <<<<<<< HEAD + Objects.requireNonNull(map, "The map must not be null."); + ||||||| 4551c3df1 + public static Map.Entry get(final Map map, final int index) { + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====1 +1:1392c + if (object instanceof Map) { +2:1470c +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1578c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====1 +1:1526c + * @param coll the collection to check +2:1605c +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1609,1626c + <<<<<<< HEAD + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ||||||| 4551c3df1 + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ======= + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====1 +1:1539c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1630c +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1559c + * @param coll the collection to check +2:1650c +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1654,1671c + 
<<<<<<< HEAD + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ||||||| 4551c3df1 + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ======= + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====1 +1:1572c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1675c +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1649,1651c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for the merge. +2:1752,1754c +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1659c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { +2:1761,1779c + <<<<<<< HEAD + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); + ||||||| 4551c3df1 + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } + ======= + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +3:1691,1692c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +==== +1:1661,1666c + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1781,1784c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1694,1696c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====1 +1:1669,1670c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +2:1787,1788c +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? 
+ Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====1 +1:1672c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +2:1790c +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1832c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1724,1727c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +2:1843,1846c +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====1 +1:1729c + * the collection c and thus cannot call c.retainAll(retain);. +2:1848c +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====1 +1:1731,1732c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +2:1850,1851c +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====1 +1:1734c + * retain that provides a fast (e.g. O(1)) implementation of +2:1853c +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====1 +1:1741,1742c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +2:1860,1861c +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. +====1 +1:1746a +2:1866,1867c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====1 +1:1752,1755c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +2:1873,1876c +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====1 +1:1757,1758c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +2:1878,1879c +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====1 +1:1762c + * in collection and retain. Hence this method is +2:1883c +3:1795c + * in {@code collection} and {@code retain}. 
Hence this method is +====1 +1:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +2:1892,1893c +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1900,1902c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1923,1925c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1930c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1952c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1845,1846c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +2:1964,1965c +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====1 +1:1861,1865c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +2:1980,1984c +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====1 +1:1867c + * the collection c and thus cannot call collection.removeAll(remove);. +2:1986c +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====1 +1:1869,1870c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +2:1988,1989c +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====1 +1:1872c + * remove that provides a fast (e.g. O(1)) implementation of +2:1991c +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====1 +1:1878,1880c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. 
+2:1997,1999c +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====1 +1:1886c + } +2:2005c +3:1917c + } +====1 +1:1889c + * Removes all elements in remove from collection. +2:2008c +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====1 +1:1891,1894c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +2:2010,2013c +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====1 +1:1896,1897c + * the collection c and thus cannot call + * collection.removeAll(remove). +2:2015,2016c +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====1 +1:1901c + * in collection and remove. Hence this method is +2:2020c +3:1932c + * in {@code collection} and {@code remove}. Hence this method is +====1 +1:1910,1911c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +2:2029,2030c +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:2037,2039c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:2082c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2100c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2121,2122c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2146,2147c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2162,2170c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..b98dcdbd96 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,725 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void testGetCardinalityMapNull() { + 
CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl2() { + final 
Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====1 +1:655c + assertNull(CollectionUtils.find(null,testPredicate)); +2:835c +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====1 +1:1279c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +2:1459c +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); + } + +====1 +1:1385,1389c + try { + 
CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====1 +1:1477c + List list = new ArrayList<>(); +2:1670c +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1697c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeRange(list, 0, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====1 +1:1501c + Collection list = new ArrayList<>(); +2:1702c +3:1694c + final Collection list = new ArrayList<>(); +==== +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1704,1710c + <<<<<<< HEAD + CollectionUtils.removeRange(list, -1, 1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, -1, 1); + ======= + final Collection result = CollectionUtils.removeRange(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====1 +1:1508c + Collection list = new ArrayList<>(); +2:1715c +3:1701c + final Collection list = new ArrayList<>(); +==== +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1717,1723c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, -1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 0, -1); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====1 +1:1515c + Collection list = new ArrayList<>(); +2:1728c +3:1708c + final Collection list = new ArrayList<>(); +==== +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1731,1737c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 1, 0); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 1, 0); + ======= + final Collection result = CollectionUtils.removeRange(list, 1, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====1 +1:1523c + Collection list = new ArrayList<>(); +2:1742c +3:1716c + final Collection list = new ArrayList<>(); +==== +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1744,1750c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, 2); + ||||||| 4551c3df1 + Collection result = 
CollectionUtils.removeRange(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====1 +1:1530c + List list = new ArrayList<>(); +2:1755c +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1784c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1786,1795c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeCount(list, 0, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1800,1809c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1814,1823c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====1 +1:1579c + Collection list = new ArrayList<>(); +2:1828c +3:1772c + final Collection list = new ArrayList<>(); +==== +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1830,1836c + <<<<<<< HEAD + CollectionUtils.removeCount(list, 0, 2); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeCount(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeCount(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1863c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1865,1876c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final NullPointerException ex) { + // 
expected + } + try { + CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1885,1896c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====1 +1:1740c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +2:2003c +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:2007,2047c + <<<<<<< HEAD + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) + ||||||| 4551c3df1 + @Test(expected=IndexOutOfBoundsException.class) + ======= + @Test(expected = IndexOutOfBoundsException.class) + >>>>>>> TEMP_RIGHT_BRANCH +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1746c + CollectionUtils.get((Object)collectionA, -3); +2:2049c +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====1 +1:1749c + @Test(expected=IndexOutOfBoundsException.class) +2:2052c +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1751c + CollectionUtils.get((Object)collectionA.iterator(), 30); +2:2054c +3:1986c + CollectionUtils.get((Object) collectionA.iterator(), 
30); +====1 +1:1754c + @Test(expected=IllegalArgumentException.class) +2:2057c +3:1989c + @Test(expected = IllegalArgumentException.class) +====1 +1:1756c + CollectionUtils.get((Object)null, 0); +2:2059c +3:1991c + CollectionUtils.get((Object) null, 0); +====1 +1:1761,1762c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +2:2064,2065c +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====1 +1:1764c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +2:2067c +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2099,2103c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2137,2141c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_adjacent/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_adjacent/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..9187319c0a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_adjacent/diff_CollectionUtils.java.txt @@ -0,0 +1,884 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====3 +1:75c +2:76c + public CardinalityHelper(final Iterable a, final Iterable b) { +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:142c +2:143c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:187c +2:188c + * CollectionUtils should not normally be instantiated. +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====3 +1:205c +2:206c + * Returns an immutable empty collection if the argument is null, +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====3 +1:209,210c +2:210,211c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====3 +1:357c +2:367c + * Returns true iff all elements of {@code coll2} are also contained +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====3 +1:361c +2:371c + * In other words, this method returns true iff the +3:371c + * In other words, this method returns {@code true} iff the +====3 +1:376c +2:386c + * @return true iff the intersection of the collections has the same cardinality +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====3 +1:409c +2:421c + * Returns true iff at least one element is in both collections. +3:421c + * Returns {@code true} iff at least one element is in both collections. +====3 +1:411c +2:423c + * In other words, this method returns true iff the +3:423c + * In other words, this method returns {@code true} iff the +====3 +1:415c +2:427c + * @param the type of object to lookup in coll1. +3:427c + * @param the type of object to lookup in {@code coll1}. +====3 +1:418c +2:430c + * @return true iff the intersection of the collections is non-empty +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====3 +1:440c +2:454c + * Returns true iff at least one element is in both collections. +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====3 +1:442c +2:456c + * In other words, this method returns true iff the +3:456c + * In other words, this method returns {@code true} iff the +====3 +1:448c +2:462c + * @return true iff the intersection of the collections is non-empty +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====3 +1:503c +2:520c + * @return true iff a is a sub-collection of b +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:528c +2:547c + *
  • a.size() and b.size() represent the +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====3 +1:530c +2:549c + *
  • a.size() < Integer.MAXVALUE
  • +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====3 +1:535c +2:554c + * @return true iff a is a proper sub-collection of b +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:554c +2:575c + * @return true iff the collections contain the same elements with the same cardinalities. +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====3 +1:561c +2:584c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====3 +1:564,565c +2:587,588c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====3 +1:591c +2:614c + * @return true iff the collections contain the same elements with the same cardinalities. +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,602c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { +2:621,624c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { +3:621,625c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); + + if (a.size() != b.size()) { +====3 +1:626c +2:648c + public EquatorWrapper(final Equator equator, final O object) { +3:649c + EquatorWrapper(final Equator equator, final O object) { +====3 +1:655c +2:677c + * @param coll the {@link Iterable} to search +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:685,687c + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====3 +1:799c +2:819c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====3 +1:856c +2:876c + * A null collection or predicate matches no elements. +3:876c + * A {@code null} collection or predicate matches no elements. +====3 +1:874c +2:894c + * A null collection or predicate returns false. 
+3:894c + * A {@code null} collection or predicate returns false. +====3 +1:893c +2:913c + * A null predicate returns false. +3:913c + * A {@code null} predicate returns false. +====3 +1:896c +2:916c + * A null or empty collection returns true. +3:916c + * A {@code null} or empty collection returns true. +====3 +1:916c +2:936c + * A null predicate matches no elements. +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:942a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:946,950c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:965,966c +2:987,988c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====3 +1:969,970c +2:991,992c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====3 +1:1010c +2:1032c + * If the input predicate is null, the result is an empty +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1039a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1043,1047c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:1031,1032c +2:1055,1056c + * If the input predicate is null, no elements are added to + * outputCollection. +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1096,1100c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? 
new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1192c + Objects.requireNonNull(collection, "The collection must not be null."); +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1208,1209c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1226,1227c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1245,1246c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1264,1265c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====3 +1:1241,1242c +2:1274,1275c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====3 +1:1244,1245c +2:1277,1278c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1291c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====3 +1:1273,1274c +2:1307,1308c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1322c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====3 +1:1292,1294c +2:1327,1329c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====3 +1:1300,1301c +2:1335,1336c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====3 +1:1304,1305c +2:1339,1340c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====3 +1:1307c +2:1342c + *
  • Collection -- the value returned is the index-th object +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====3 +1:1310c +2:1345c + * index-th object in the Iterator/Enumeration, if there +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====3 +1:1312c +2:1347c + * index (or to the end, if index exceeds the +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====3 +1:1327c +2:1362c + if (object instanceof Map) { +3:1362c + if (object instanceof Map) { +====3 +1:1354,1355c +2:1389,1390c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1393c +3:1393c + * @param the value type in the {@link Map} +====1 +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1399,1400c +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====3 +1:1392c +2:1428c + if (object instanceof Map) { +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1536c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====3 +1:1526c +2:1563c + * @param coll the collection to check +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1567,1570c + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====3 +1:1539c +2:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1559c +2:1594c + * @param coll the collection to check +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1598,1601c + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====3 +1:1572c +2:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1649,1651c +2:1682,1684c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for 
the merge. +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1666c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1691,1695c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); +3:1691,1696c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { + + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====3 +1:1669,1670c +2:1698,1699c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? + Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====3 +1:1672c +2:1701c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1743c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====3 +1:1724,1727c +2:1754,1757c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====3 +1:1729c +2:1759c + * the collection c and thus cannot call c.retainAll(retain);. +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====3 +1:1731,1732c +2:1761,1762c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====3 +1:1734c +2:1764c + * retain that provides a fast (e.g. O(1)) implementation of +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====3 +1:1741,1742c +2:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. 
+====1 +1:1746a +2:1777,1778c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====3 +1:1752,1755c +2:1784,1787c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====3 +1:1757,1758c +2:1789,1790c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====3 +1:1762c +2:1794c + * in collection and retain. Hence this method is +3:1795c + * in {@code collection} and {@code retain}. Hence this method is +====3 +1:1771,1772c +2:1803,1804c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1811,1813c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1834,1836c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1841c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1863c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====3 +1:1845,1846c +2:1875,1876c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====3 +1:1861,1865c +2:1891,1895c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. 
The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====3 +1:1867c +2:1897c + * the collection c and thus cannot call collection.removeAll(remove);. +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====3 +1:1869,1870c +2:1899,1900c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====3 +1:1872c +2:1902c + * remove that provides a fast (e.g. O(1)) implementation of +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====3 +1:1878,1880c +2:1908,1910c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====3 +1:1886c +2:1916c + } +3:1917c + } +====3 +1:1889c +2:1919c + * Removes all elements in remove from collection. +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====3 +1:1891,1894c +2:1921,1924c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====3 +1:1896,1897c +2:1926,1927c + * the collection c and thus cannot call + * collection.removeAll(remove). +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====3 +1:1901c +2:1931c + * in collection and remove. Hence this method is +3:1932c + * in {@code collection} and {@code remove}. 
Hence this method is +====3 +1:1910,1911c +2:1940,1941c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:1948,1950c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:1993c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2011c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2032,2033c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2057,2058c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2073c + Objects.requireNonNull(collection, "The collection must not be null."); +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_adjacent/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_adjacent/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..27920f0ad8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_adjacent/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,652 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = 
NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? 
Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====3 +1:655c +2:835c + assertNull(CollectionUtils.find(null,testPredicate)); +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====3 +1:1279c +2:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); + } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====3 +1:1477c +2:1670c + List list = new ArrayList<>(); +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = 
null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1689c + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====3 +1:1501c +2:1694c + Collection list = new ArrayList<>(); +3:1694c + final Collection list = new ArrayList<>(); +====1 +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1696c +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====3 +1:1508c +2:1701c + Collection list = new ArrayList<>(); +3:1701c + final Collection list = new ArrayList<>(); +====1 +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1703c +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====3 +1:1515c +2:1708c + Collection list = new ArrayList<>(); +3:1708c + final Collection list = new ArrayList<>(); +====1 +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1711c +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====3 +1:1523c +2:1716c + Collection list = new ArrayList<>(); +3:1716c + final Collection list = new ArrayList<>(); +====1 +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1718c +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====3 +1:1530c +2:1723c + List list = new ArrayList<>(); +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1752c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1754,1755c + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1760,1761c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1766,1767c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====3 +1:1579c +2:1772c + Collection list = new ArrayList<>(); +3:1772c + final Collection list = new ArrayList<>(); +====1 +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1774c +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1801c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1803,1814c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final NullPointerException ex) { + // expected + } + try { 
+ CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1823,1834c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====3 +1:1740c +2:1941c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1746c +2:1981c + CollectionUtils.get((Object)collectionA, -3); +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====3 +1:1749c +2:1984c + @Test(expected=IndexOutOfBoundsException.class) +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1751c +2:1986c + CollectionUtils.get((Object)collectionA.iterator(), 30); +3:1986c + CollectionUtils.get((Object) collectionA.iterator(), 30); +====3 +1:1754c +2:1989c + @Test(expected=IllegalArgumentException.class) +3:1989c + @Test(expected = IllegalArgumentException.class) +====3 +1:1756c +2:1991c + 
CollectionUtils.get((Object)null, 0); +3:1991c + CollectionUtils.get((Object) null, 0); +====3 +1:1761,1762c +2:1996,1997c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====3 +1:1764c +2:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2031,2035c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2069,2073c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_ignorespace/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_ignorespace/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..dd4b5b6b97 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_ignorespace/diff_CollectionUtils.java.txt @@ -0,0 +1,985 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====1 +1:75c + public CardinalityHelper(final Iterable a, final Iterable b) { +2:76c +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:142c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +2:143c +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:187c + * CollectionUtils should not normally be instantiated. +2:188c +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====1 +1:205c + * Returns an immutable empty collection if the argument is null, +2:206c +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====1 +1:209,210c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +2:210,211c +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====1 +1:357c + * Returns true iff all elements of {@code coll2} are also contained +2:367c +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====1 +1:361c + * In other words, this method returns true iff the +2:371c +3:371c + * In other words, this method returns {@code true} iff the +====1 +1:376c + * @return true iff the intersection of the collections has the same cardinality +2:386c +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:409c + * Returns true iff at least one element is in both collections. +2:421c +3:421c + * Returns {@code true} iff at least one element is in both collections. +====1 +1:411c + * In other words, this method returns true iff the +2:423c +3:423c + * In other words, this method returns {@code true} iff the +====1 +1:415c + * @param the type of object to lookup in coll1. +2:427c +3:427c + * @param the type of object to lookup in {@code coll1}. +====1 +1:418c + * @return true iff the intersection of the collections is non-empty +2:430c +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====1 +1:440c + * Returns true iff at least one element is in both collections. +2:454c +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====1 +1:442c + * In other words, this method returns true iff the +2:456c +3:456c + * In other words, this method returns {@code true} iff the +====1 +1:448c + * @return true iff the intersection of the collections is non-empty +2:462c +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====1 +1:503c + * @return true iff a is a sub-collection of b +2:520c +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:528c + *
  • a.size() and b.size() represent the +2:547c +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====1 +1:530c + *
  • a.size() < Integer.MAXVALUE
  • +2:549c +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====1 +1:535c + * @return true iff a is a proper sub-collection of b +2:554c +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:554c + * @return true iff the collections contain the same elements with the same cardinalities. +2:575c +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,586c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if(a.size() != b.size()) { + ======= + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====1 +1:561c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +2:590c +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====1 +1:564,565c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +2:593,594c +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====1 +1:591c + * @return true iff the collections contain the same elements with the same cardinalities. +2:620c +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,600c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } +2:627,639c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { + ======= + Objects.requireNonNull(equator, "equator"); +3:621,623c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); +==== +1:602c + if(a.size() != b.size()) { +2:641,642c + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:625c + if (a.size() != b.size()) { +====1 +1:626c + public EquatorWrapper(final Equator equator, final O object) { +2:666c +3:649c + EquatorWrapper(final Equator equator, final O object) { +====1 +1:655c + * @param coll the {@link Iterable} to search +2:695c +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:703,716c + <<<<<<< HEAD + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); + ||||||| 4551c3df1 + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + 
return IterableUtils.frequency(coll, obj); + ======= + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); + >>>>>>> TEMP_RIGHT_BRANCH +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====1 +1:799c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +2:848c +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====1 +1:856c + * A null collection or predicate matches no elements. +2:905c +3:876c + * A {@code null} collection or predicate matches no elements. +====1 +1:874c + * A null collection or predicate returns false. +2:923c +3:894c + * A {@code null} collection or predicate returns false. +====1 +1:893c + * A null predicate returns false. +2:942c +3:913c + * A {@code null} predicate returns false. +====1 +1:896c + * A null or empty collection returns true. +2:945c +3:916c + * A {@code null} or empty collection returns true. +====1 +1:916c + * A null predicate matches no elements. +2:965c +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:971a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:975,979c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:965,966c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +2:1016,1017c +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====1 +1:969,970c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +2:1020,1021c +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====1 +1:1010c + * If the input predicate is null, the result is an empty +2:1061c +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1068a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1072,1076c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:1031,1032c + * If the input predicate is null, no elements are added to + * outputCollection. +2:1084,1085c +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? 
+ new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1125,1129c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1221,1229c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1245,1246c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1263,1264c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1282,1283c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1301,1302c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====1 +1:1241,1242c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1311,1312c +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1244,1245c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +2:1314,1315c +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1328c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====1 +1:1273,1274c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1344,1345c +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1359c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====1 +1:1292,1294c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +2:1364,1366c +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====1 +1:1300,1301c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +2:1372,1373c +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====1 +1:1304,1305c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +2:1376,1377c +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====1 +1:1307c + *
  • Collection -- the value returned is the index-th object +2:1379c +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====1 +1:1310c + * index-th object in the Iterator/Enumeration, if there +2:1382c +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====1 +1:1312c + * index (or to the end, if index exceeds the +2:1384c +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====1 +1:1327c + if (object instanceof Map) { +2:1399c +3:1362c + if (object instanceof Map) { +====1 +1:1354,1355c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +2:1426,1427c +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1430c +3:1393c + * @param the value type in the {@link Map} +==== +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1436,1442c + public static Map.Entry get(final Map map, final int index) { + <<<<<<< HEAD + Objects.requireNonNull(map, "The map must not be null."); + ||||||| 4551c3df1 + public static Map.Entry get(final Map map, final int index) { + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====1 +1:1392c + if (object instanceof Map) { +2:1470c +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1578c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====1 +1:1526c + * @param coll the collection to check +2:1605c +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1609,1626c + <<<<<<< HEAD + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ||||||| 4551c3df1 + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ======= + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====1 +1:1539c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1630c +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1559c + * @param coll the collection to check +2:1650c +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1654,1671c + 
<<<<<<< HEAD + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ||||||| 4551c3df1 + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ======= + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====1 +1:1572c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1675c +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1649,1651c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for the merge. +2:1752,1754c +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1659c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { +2:1761,1779c + <<<<<<< HEAD + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); + ||||||| 4551c3df1 + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } + ======= + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +3:1691,1692c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +==== +1:1661,1666c + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1781,1784c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1694,1696c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====1 +1:1669,1670c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +2:1787,1788c +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? 
+ Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====1 +1:1672c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +2:1790c +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1832c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1724,1727c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +2:1843,1846c +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====1 +1:1729c + * the collection c and thus cannot call c.retainAll(retain);. +2:1848c +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====1 +1:1731,1732c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +2:1850,1851c +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====1 +1:1734c + * retain that provides a fast (e.g. O(1)) implementation of +2:1853c +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====1 +1:1741,1742c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +2:1860,1861c +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. +====1 +1:1746a +2:1866,1867c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====1 +1:1752,1755c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +2:1873,1876c +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====1 +1:1757,1758c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +2:1878,1879c +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====1 +1:1762c + * in collection and retain. Hence this method is +2:1883c +3:1795c + * in {@code collection} and {@code retain}. 
Hence this method is +====1 +1:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +2:1892,1893c +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1900,1902c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1923,1925c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1930c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1952c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1845,1846c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +2:1964,1965c +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====1 +1:1861,1865c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +2:1980,1984c +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====1 +1:1867c + * the collection c and thus cannot call collection.removeAll(remove);. +2:1986c +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====1 +1:1869,1870c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +2:1988,1989c +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====1 +1:1872c + * remove that provides a fast (e.g. O(1)) implementation of +2:1991c +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====1 +1:1878,1880c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. 
+2:1997,1999c +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====3 +1:1886c +2:2005c + } +3:1917c + } +====1 +1:1889c + * Removes all elements in remove from collection. +2:2008c +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====1 +1:1891,1894c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +2:2010,2013c +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====1 +1:1896,1897c + * the collection c and thus cannot call + * collection.removeAll(remove). +2:2015,2016c +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====1 +1:1901c + * in collection and remove. Hence this method is +2:2020c +3:1932c + * in {@code collection} and {@code remove}. Hence this method is +====1 +1:1910,1911c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +2:2029,2030c +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:2037,2039c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:2082c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2100c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2121,2122c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2146,2147c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2162,2170c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_ignorespace/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_ignorespace/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..b98dcdbd96 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_ignorespace/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,725 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void 
testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void 
testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====1 +1:655c + assertNull(CollectionUtils.find(null,testPredicate)); +2:835c +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====1 +1:1279c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +2:1459c +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); 
+ } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====1 +1:1477c + List list = new ArrayList<>(); +2:1670c +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1697c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeRange(list, 0, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====1 +1:1501c + Collection list = new ArrayList<>(); +2:1702c +3:1694c + final Collection list = new ArrayList<>(); +==== +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1704,1710c + <<<<<<< HEAD + CollectionUtils.removeRange(list, -1, 1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, -1, 1); + ======= + final Collection result = CollectionUtils.removeRange(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====1 +1:1508c + Collection list = new ArrayList<>(); +2:1715c +3:1701c + final Collection list = new ArrayList<>(); +==== +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1717,1723c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, -1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 0, -1); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====1 +1:1515c + Collection list = new ArrayList<>(); +2:1728c +3:1708c + final Collection list = new ArrayList<>(); +==== +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1731,1737c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 1, 0); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 1, 0); + ======= + final Collection result = CollectionUtils.removeRange(list, 1, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====1 +1:1523c + Collection list = new ArrayList<>(); +2:1742c +3:1716c + final Collection list = new ArrayList<>(); +==== +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1744,1750c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, 2); + ||||||| 4551c3df1 + 
Collection result = CollectionUtils.removeRange(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====1 +1:1530c + List list = new ArrayList<>(); +2:1755c +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1784c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1786,1795c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeCount(list, 0, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1800,1809c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1814,1823c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====1 +1:1579c + Collection list = new ArrayList<>(); +2:1828c +3:1772c + final Collection list = new ArrayList<>(); +==== +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1830,1836c + <<<<<<< HEAD + CollectionUtils.removeCount(list, 0, 2); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeCount(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeCount(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1863c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1865,1876c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final 
NullPointerException ex) { + // expected + } + try { + CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1885,1896c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====1 +1:1740c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +2:2003c +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:2007,2047c + <<<<<<< HEAD + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) + ||||||| 4551c3df1 + @Test(expected=IndexOutOfBoundsException.class) + ======= + @Test(expected = IndexOutOfBoundsException.class) + >>>>>>> TEMP_RIGHT_BRANCH +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1746c + CollectionUtils.get((Object)collectionA, -3); +2:2049c +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====1 +1:1749c + @Test(expected=IndexOutOfBoundsException.class) +2:2052c +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1751c + CollectionUtils.get((Object)collectionA.iterator(), 30); +2:2054c +3:1986c + 
CollectionUtils.get((Object) collectionA.iterator(), 30); +====1 +1:1754c + @Test(expected=IllegalArgumentException.class) +2:2057c +3:1989c + @Test(expected = IllegalArgumentException.class) +====1 +1:1756c + CollectionUtils.get((Object)null, 0); +2:2059c +3:1991c + CollectionUtils.get((Object) null, 0); +====1 +1:1761,1762c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +2:2064,2065c +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====1 +1:1764c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +2:2067c +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2099,2103c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2137,2141c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..9187319c0a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports/diff_CollectionUtils.java.txt @@ -0,0 +1,884 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====3 +1:75c +2:76c + public CardinalityHelper(final Iterable a, final Iterable b) { +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:142c +2:143c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:187c +2:188c + * CollectionUtils should not normally be instantiated. +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====3 +1:205c +2:206c + * Returns an immutable empty collection if the argument is null, +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====3 +1:209,210c +2:210,211c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====3 +1:357c +2:367c + * Returns true iff all elements of {@code coll2} are also contained +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====3 +1:361c +2:371c + * In other words, this method returns true iff the +3:371c + * In other words, this method returns {@code true} iff the +====3 +1:376c +2:386c + * @return true iff the intersection of the collections has the same cardinality +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====3 +1:409c +2:421c + * Returns true iff at least one element is in both collections. +3:421c + * Returns {@code true} iff at least one element is in both collections. +====3 +1:411c +2:423c + * In other words, this method returns true iff the +3:423c + * In other words, this method returns {@code true} iff the +====3 +1:415c +2:427c + * @param the type of object to lookup in coll1. +3:427c + * @param the type of object to lookup in {@code coll1}. +====3 +1:418c +2:430c + * @return true iff the intersection of the collections is non-empty +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====3 +1:440c +2:454c + * Returns true iff at least one element is in both collections. +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====3 +1:442c +2:456c + * In other words, this method returns true iff the +3:456c + * In other words, this method returns {@code true} iff the +====3 +1:448c +2:462c + * @return true iff the intersection of the collections is non-empty +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====3 +1:503c +2:520c + * @return true iff a is a sub-collection of b +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:528c +2:547c + *
  • a.size() and b.size() represent the +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====3 +1:530c +2:549c + *
  • a.size() < Integer.MAXVALUE
  • +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====3 +1:535c +2:554c + * @return true iff a is a proper sub-collection of b +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:554c +2:575c + * @return true iff the collections contain the same elements with the same cardinalities. +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====3 +1:561c +2:584c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====3 +1:564,565c +2:587,588c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====3 +1:591c +2:614c + * @return true iff the collections contain the same elements with the same cardinalities. +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,602c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { +2:621,624c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { +3:621,625c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); + + if (a.size() != b.size()) { +====3 +1:626c +2:648c + public EquatorWrapper(final Equator equator, final O object) { +3:649c + EquatorWrapper(final Equator equator, final O object) { +====3 +1:655c +2:677c + * @param coll the {@link Iterable} to search +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:685,687c + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====3 +1:799c +2:819c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====3 +1:856c +2:876c + * A null collection or predicate matches no elements. +3:876c + * A {@code null} collection or predicate matches no elements. +====3 +1:874c +2:894c + * A null collection or predicate returns false. 
+3:894c + * A {@code null} collection or predicate returns false. +====3 +1:893c +2:913c + * A null predicate returns false. +3:913c + * A {@code null} predicate returns false. +====3 +1:896c +2:916c + * A null or empty collection returns true. +3:916c + * A {@code null} or empty collection returns true. +====3 +1:916c +2:936c + * A null predicate matches no elements. +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:942a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:946,950c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:965,966c +2:987,988c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====3 +1:969,970c +2:991,992c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====3 +1:1010c +2:1032c + * If the input predicate is null, the result is an empty +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1039a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1043,1047c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:1031,1032c +2:1055,1056c + * If the input predicate is null, no elements are added to + * outputCollection. +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1096,1100c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? 
new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1192c + Objects.requireNonNull(collection, "The collection must not be null."); +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1208,1209c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1226,1227c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1245,1246c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1264,1265c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====3 +1:1241,1242c +2:1274,1275c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====3 +1:1244,1245c +2:1277,1278c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1291c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====3 +1:1273,1274c +2:1307,1308c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1322c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====3 +1:1292,1294c +2:1327,1329c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====3 +1:1300,1301c +2:1335,1336c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====3 +1:1304,1305c +2:1339,1340c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====3 +1:1307c +2:1342c + *
  • Collection -- the value returned is the index-th object +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====3 +1:1310c +2:1345c + * index-th object in the Iterator/Enumeration, if there +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====3 +1:1312c +2:1347c + * index (or to the end, if index exceeds the +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====3 +1:1327c +2:1362c + if (object instanceof Map) { +3:1362c + if (object instanceof Map) { +====3 +1:1354,1355c +2:1389,1390c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1393c +3:1393c + * @param the value type in the {@link Map} +====1 +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1399,1400c +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====3 +1:1392c +2:1428c + if (object instanceof Map) { +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1536c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====3 +1:1526c +2:1563c + * @param coll the collection to check +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1567,1570c + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====3 +1:1539c +2:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1559c +2:1594c + * @param coll the collection to check +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1598,1601c + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====3 +1:1572c +2:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1649,1651c +2:1682,1684c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for 
the merge. +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1666c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1691,1695c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); +3:1691,1696c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { + + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====3 +1:1669,1670c +2:1698,1699c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? + Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====3 +1:1672c +2:1701c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1743c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====3 +1:1724,1727c +2:1754,1757c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====3 +1:1729c +2:1759c + * the collection c and thus cannot call c.retainAll(retain);. +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====3 +1:1731,1732c +2:1761,1762c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====3 +1:1734c +2:1764c + * retain that provides a fast (e.g. O(1)) implementation of +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====3 +1:1741,1742c +2:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. 
+====1 +1:1746a +2:1777,1778c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====3 +1:1752,1755c +2:1784,1787c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====3 +1:1757,1758c +2:1789,1790c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====3 +1:1762c +2:1794c + * in collection and retain. Hence this method is +3:1795c + * in {@code collection} and {@code retain}. Hence this method is +====3 +1:1771,1772c +2:1803,1804c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1811,1813c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1834,1836c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1841c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1863c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====3 +1:1845,1846c +2:1875,1876c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====3 +1:1861,1865c +2:1891,1895c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. 
The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====3 +1:1867c +2:1897c + * the collection c and thus cannot call collection.removeAll(remove);. +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====3 +1:1869,1870c +2:1899,1900c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====3 +1:1872c +2:1902c + * remove that provides a fast (e.g. O(1)) implementation of +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====3 +1:1878,1880c +2:1908,1910c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====3 +1:1886c +2:1916c + } +3:1917c + } +====3 +1:1889c +2:1919c + * Removes all elements in remove from collection. +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====3 +1:1891,1894c +2:1921,1924c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====3 +1:1896,1897c +2:1926,1927c + * the collection c and thus cannot call + * collection.removeAll(remove). +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====3 +1:1901c +2:1931c + * in collection and remove. Hence this method is +3:1932c + * in {@code collection} and {@code remove}. 
Hence this method is +====3 +1:1910,1911c +2:1940,1941c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:1948,1950c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:1993c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2011c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2032,2033c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2057,2058c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2073c + Objects.requireNonNull(collection, "The collection must not be null."); +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..27920f0ad8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,652 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = 
NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? 
Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====3 +1:655c +2:835c + assertNull(CollectionUtils.find(null,testPredicate)); +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====3 +1:1279c +2:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); + } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====3 +1:1477c +2:1670c + List list = new ArrayList<>(); +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = 
null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1689c + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====3 +1:1501c +2:1694c + Collection list = new ArrayList<>(); +3:1694c + final Collection list = new ArrayList<>(); +====1 +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1696c +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====3 +1:1508c +2:1701c + Collection list = new ArrayList<>(); +3:1701c + final Collection list = new ArrayList<>(); +====1 +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1703c +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====3 +1:1515c +2:1708c + Collection list = new ArrayList<>(); +3:1708c + final Collection list = new ArrayList<>(); +====1 +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1711c +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====3 +1:1523c +2:1716c + Collection list = new ArrayList<>(); +3:1716c + final Collection list = new ArrayList<>(); +====1 +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1718c +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====3 +1:1530c +2:1723c + List list = new ArrayList<>(); +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1752c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1754,1755c + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1760,1761c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1766,1767c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====3 +1:1579c +2:1772c + Collection list = new ArrayList<>(); +3:1772c + final Collection list = new ArrayList<>(); +====1 +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1774c +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1801c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1803,1814c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final NullPointerException ex) { + // expected + } + try { 
+ CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1823,1834c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====3 +1:1740c +2:1941c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1746c +2:1981c + CollectionUtils.get((Object)collectionA, -3); +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====3 +1:1749c +2:1984c + @Test(expected=IndexOutOfBoundsException.class) +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1751c +2:1986c + CollectionUtils.get((Object)collectionA.iterator(), 30); +3:1986c + CollectionUtils.get((Object) collectionA.iterator(), 30); +====3 +1:1754c +2:1989c + @Test(expected=IllegalArgumentException.class) +3:1989c + @Test(expected = IllegalArgumentException.class) +====3 +1:1756c +2:1991c + 
CollectionUtils.get((Object)null, 0); +3:1991c + CollectionUtils.get((Object) null, 0); +====3 +1:1761,1762c +2:1996,1997c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====3 +1:1764c +2:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2031,2035c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2069,2073c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports_ignorespace/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports_ignorespace/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..9187319c0a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports_ignorespace/diff_CollectionUtils.java.txt @@ -0,0 +1,884 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====3 +1:75c +2:76c + public CardinalityHelper(final Iterable a, final Iterable b) { +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:142c +2:143c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:187c +2:188c + * CollectionUtils should not normally be instantiated. +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====3 +1:205c +2:206c + * Returns an immutable empty collection if the argument is null, +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====3 +1:209,210c +2:210,211c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====3 +1:357c +2:367c + * Returns true iff all elements of {@code coll2} are also contained +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====3 +1:361c +2:371c + * In other words, this method returns true iff the +3:371c + * In other words, this method returns {@code true} iff the +====3 +1:376c +2:386c + * @return true iff the intersection of the collections has the same cardinality +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====3 +1:409c +2:421c + * Returns true iff at least one element is in both collections. +3:421c + * Returns {@code true} iff at least one element is in both collections. +====3 +1:411c +2:423c + * In other words, this method returns true iff the +3:423c + * In other words, this method returns {@code true} iff the +====3 +1:415c +2:427c + * @param the type of object to lookup in coll1. +3:427c + * @param the type of object to lookup in {@code coll1}. +====3 +1:418c +2:430c + * @return true iff the intersection of the collections is non-empty +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====3 +1:440c +2:454c + * Returns true iff at least one element is in both collections. +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====3 +1:442c +2:456c + * In other words, this method returns true iff the +3:456c + * In other words, this method returns {@code true} iff the +====3 +1:448c +2:462c + * @return true iff the intersection of the collections is non-empty +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====3 +1:503c +2:520c + * @return true iff a is a sub-collection of b +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:528c +2:547c + *
  • a.size() and b.size() represent the +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====3 +1:530c +2:549c + *
  • a.size() < Integer.MAXVALUE
  • +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====3 +1:535c +2:554c + * @return true iff a is a proper sub-collection of b +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:554c +2:575c + * @return true iff the collections contain the same elements with the same cardinalities. +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====3 +1:561c +2:584c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====3 +1:564,565c +2:587,588c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====3 +1:591c +2:614c + * @return true iff the collections contain the same elements with the same cardinalities. +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,602c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { +2:621,624c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { +3:621,625c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); + + if (a.size() != b.size()) { +====3 +1:626c +2:648c + public EquatorWrapper(final Equator equator, final O object) { +3:649c + EquatorWrapper(final Equator equator, final O object) { +====3 +1:655c +2:677c + * @param coll the {@link Iterable} to search +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:685,687c + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====3 +1:799c +2:819c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====3 +1:856c +2:876c + * A null collection or predicate matches no elements. +3:876c + * A {@code null} collection or predicate matches no elements. +====3 +1:874c +2:894c + * A null collection or predicate returns false. 
+3:894c + * A {@code null} collection or predicate returns false. +====3 +1:893c +2:913c + * A null predicate returns false. +3:913c + * A {@code null} predicate returns false. +====3 +1:896c +2:916c + * A null or empty collection returns true. +3:916c + * A {@code null} or empty collection returns true. +====3 +1:916c +2:936c + * A null predicate matches no elements. +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:942a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:946,950c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:965,966c +2:987,988c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====3 +1:969,970c +2:991,992c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====3 +1:1010c +2:1032c + * If the input predicate is null, the result is an empty +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1039a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1043,1047c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:1031,1032c +2:1055,1056c + * If the input predicate is null, no elements are added to + * outputCollection. +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1096,1100c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? 
new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1192c + Objects.requireNonNull(collection, "The collection must not be null."); +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1208,1209c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1226,1227c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1245,1246c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1264,1265c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====3 +1:1241,1242c +2:1274,1275c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====3 +1:1244,1245c +2:1277,1278c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1291c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====3 +1:1273,1274c +2:1307,1308c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1322c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====3 +1:1292,1294c +2:1327,1329c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====3 +1:1300,1301c +2:1335,1336c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====3 +1:1304,1305c +2:1339,1340c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====3 +1:1307c +2:1342c + *
  • Collection -- the value returned is the index-th object +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====3 +1:1310c +2:1345c + * index-th object in the Iterator/Enumeration, if there +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====3 +1:1312c +2:1347c + * index (or to the end, if index exceeds the +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====3 +1:1327c +2:1362c + if (object instanceof Map) { +3:1362c + if (object instanceof Map) { +====3 +1:1354,1355c +2:1389,1390c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1393c +3:1393c + * @param the value type in the {@link Map} +====1 +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1399,1400c +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====3 +1:1392c +2:1428c + if (object instanceof Map) { +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1536c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====3 +1:1526c +2:1563c + * @param coll the collection to check +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1567,1570c + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====3 +1:1539c +2:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1559c +2:1594c + * @param coll the collection to check +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1598,1601c + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====3 +1:1572c +2:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1649,1651c +2:1682,1684c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for 
the merge. +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1666c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1691,1695c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); +3:1691,1696c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { + + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====3 +1:1669,1670c +2:1698,1699c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? + Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====3 +1:1672c +2:1701c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1743c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====3 +1:1724,1727c +2:1754,1757c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====3 +1:1729c +2:1759c + * the collection c and thus cannot call c.retainAll(retain);. +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====3 +1:1731,1732c +2:1761,1762c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====3 +1:1734c +2:1764c + * retain that provides a fast (e.g. O(1)) implementation of +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====3 +1:1741,1742c +2:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. 
+====1 +1:1746a +2:1777,1778c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====3 +1:1752,1755c +2:1784,1787c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====3 +1:1757,1758c +2:1789,1790c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====3 +1:1762c +2:1794c + * in collection and retain. Hence this method is +3:1795c + * in {@code collection} and {@code retain}. Hence this method is +====3 +1:1771,1772c +2:1803,1804c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1811,1813c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1834,1836c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1841c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1863c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====3 +1:1845,1846c +2:1875,1876c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====3 +1:1861,1865c +2:1891,1895c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. 
The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====3 +1:1867c +2:1897c + * the collection c and thus cannot call collection.removeAll(remove);. +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====3 +1:1869,1870c +2:1899,1900c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====3 +1:1872c +2:1902c + * remove that provides a fast (e.g. O(1)) implementation of +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====3 +1:1878,1880c +2:1908,1910c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====3 +1:1886c +2:1916c + } +3:1917c + } +====3 +1:1889c +2:1919c + * Removes all elements in remove from collection. +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====3 +1:1891,1894c +2:1921,1924c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====3 +1:1896,1897c +2:1926,1927c + * the collection c and thus cannot call + * collection.removeAll(remove). +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====3 +1:1901c +2:1931c + * in collection and remove. Hence this method is +3:1932c + * in {@code collection} and {@code remove}. 
Hence this method is +====3 +1:1910,1911c +2:1940,1941c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:1948,1950c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:1993c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2011c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2032,2033c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2057,2058c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2073c + Objects.requireNonNull(collection, "The collection must not be null."); +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports_ignorespace/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports_ignorespace/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..27920f0ad8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_ort_imports_ignorespace/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,652 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + 
@Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? 
Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====3 +1:655c +2:835c + assertNull(CollectionUtils.find(null,testPredicate)); +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====3 +1:1279c +2:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); + } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====3 +1:1477c +2:1670c + List list = new ArrayList<>(); +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = 
null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1689c + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====3 +1:1501c +2:1694c + Collection list = new ArrayList<>(); +3:1694c + final Collection list = new ArrayList<>(); +====1 +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1696c +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====3 +1:1508c +2:1701c + Collection list = new ArrayList<>(); +3:1701c + final Collection list = new ArrayList<>(); +====1 +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1703c +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====3 +1:1515c +2:1708c + Collection list = new ArrayList<>(); +3:1708c + final Collection list = new ArrayList<>(); +====1 +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1711c +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====3 +1:1523c +2:1716c + Collection list = new ArrayList<>(); +3:1716c + final Collection list = new ArrayList<>(); +====1 +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1718c +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====3 +1:1530c +2:1723c + List list = new ArrayList<>(); +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1752c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1754,1755c + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1760,1761c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1766,1767c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====3 +1:1579c +2:1772c + Collection list = new ArrayList<>(); +3:1772c + final Collection list = new ArrayList<>(); +====1 +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1774c +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1801c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1803,1814c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final NullPointerException ex) { + // expected + } + try { 
+ CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1823,1834c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====3 +1:1740c +2:1941c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1746c +2:1981c + CollectionUtils.get((Object)collectionA, -3); +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====3 +1:1749c +2:1984c + @Test(expected=IndexOutOfBoundsException.class) +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1751c +2:1986c + CollectionUtils.get((Object)collectionA.iterator(), 30); +3:1986c + CollectionUtils.get((Object) collectionA.iterator(), 30); +====3 +1:1754c +2:1989c + @Test(expected=IllegalArgumentException.class) +3:1989c + @Test(expected = IllegalArgumentException.class) +====3 +1:1756c +2:1991c + 
CollectionUtils.get((Object)null, 0); +3:1991c + CollectionUtils.get((Object) null, 0); +====3 +1:1761,1762c +2:1996,1997c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====3 +1:1764c +2:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2031,2035c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2069,2073c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_histogram/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_histogram/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..a61e64f7d0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_histogram/diff_CollectionUtils.java.txt @@ -0,0 +1,985 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====1 +1:75c + public CardinalityHelper(final Iterable a, final Iterable b) { +2:76c +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:142c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +2:143c +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:187c + * CollectionUtils should not normally be instantiated. +2:188c +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====1 +1:205c + * Returns an immutable empty collection if the argument is null, +2:206c +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====1 +1:209,210c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +2:210,211c +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====1 +1:357c + * Returns true iff all elements of {@code coll2} are also contained +2:367c +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====1 +1:361c + * In other words, this method returns true iff the +2:371c +3:371c + * In other words, this method returns {@code true} iff the +====1 +1:376c + * @return true iff the intersection of the collections has the same cardinality +2:386c +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:409c + * Returns true iff at least one element is in both collections. +2:421c +3:421c + * Returns {@code true} iff at least one element is in both collections. +====1 +1:411c + * In other words, this method returns true iff the +2:423c +3:423c + * In other words, this method returns {@code true} iff the +====1 +1:415c + * @param the type of object to lookup in coll1. +2:427c +3:427c + * @param the type of object to lookup in {@code coll1}. +====1 +1:418c + * @return true iff the intersection of the collections is non-empty +2:430c +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====1 +1:440c + * Returns true iff at least one element is in both collections. +2:454c +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====1 +1:442c + * In other words, this method returns true iff the +2:456c +3:456c + * In other words, this method returns {@code true} iff the +====1 +1:448c + * @return true iff the intersection of the collections is non-empty +2:462c +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====1 +1:503c + * @return true iff a is a sub-collection of b +2:520c +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:528c + *
  • a.size() and b.size() represent the +2:547c +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====1 +1:530c + *
  • a.size() < Integer.MAXVALUE
  • +2:549c +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====1 +1:535c + * @return true iff a is a proper sub-collection of b +2:554c +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:554c + * @return true iff the collections contain the same elements with the same cardinalities. +2:575c +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,586c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if(a.size() != b.size()) { + ======= + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====1 +1:561c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +2:590c +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====1 +1:564,565c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +2:593,594c +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====1 +1:591c + * @return true iff the collections contain the same elements with the same cardinalities. +2:620c +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,600c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } +2:627,639c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { + ======= + Objects.requireNonNull(equator, "equator"); +3:621,623c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); +==== +1:602c + if(a.size() != b.size()) { +2:641,642c + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:625c + if (a.size() != b.size()) { +====1 +1:626c + public EquatorWrapper(final Equator equator, final O object) { +2:666c +3:649c + EquatorWrapper(final Equator equator, final O object) { +====1 +1:655c + * @param coll the {@link Iterable} to search +2:695c +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:703,716c + <<<<<<< HEAD + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); + ||||||| 4551c3df1 + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + 
return IterableUtils.frequency(coll, obj); + ======= + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); + >>>>>>> TEMP_RIGHT_BRANCH +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====1 +1:799c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +2:848c +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====1 +1:856c + * A null collection or predicate matches no elements. +2:905c +3:876c + * A {@code null} collection or predicate matches no elements. +====1 +1:874c + * A null collection or predicate returns false. +2:923c +3:894c + * A {@code null} collection or predicate returns false. +====1 +1:893c + * A null predicate returns false. +2:942c +3:913c + * A {@code null} predicate returns false. +====1 +1:896c + * A null or empty collection returns true. +2:945c +3:916c + * A {@code null} or empty collection returns true. +====1 +1:916c + * A null predicate matches no elements. +2:965c +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:971a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:975,979c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:965,966c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +2:1016,1017c +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====1 +1:969,970c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +2:1020,1021c +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====1 +1:1010c + * If the input predicate is null, the result is an empty +2:1061c +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1068a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1072,1076c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:1031,1032c + * If the input predicate is null, no elements are added to + * outputCollection. +2:1084,1085c +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? 
+ new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1125,1129c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1221,1229c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1245,1246c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1263,1264c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1282,1283c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1301,1302c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====1 +1:1241,1242c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1311,1312c +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1244,1245c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +2:1314,1315c +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1328c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====1 +1:1273,1274c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1344,1345c +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1359c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====1 +1:1292,1294c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +2:1364,1366c +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====1 +1:1300,1301c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +2:1372,1373c +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====1 +1:1304,1305c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +2:1376,1377c +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====1 +1:1307c + *
  • Collection -- the value returned is the index-th object +2:1379c +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====1 +1:1310c + * index-th object in the Iterator/Enumeration, if there +2:1382c +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====1 +1:1312c + * index (or to the end, if index exceeds the +2:1384c +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====1 +1:1327c + if (object instanceof Map) { +2:1399c +3:1362c + if (object instanceof Map) { +====1 +1:1354,1355c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +2:1426,1427c +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1430c +3:1393c + * @param the value type in the {@link Map} +==== +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1436,1442c + public static Map.Entry get(final Map map, final int index) { + <<<<<<< HEAD + Objects.requireNonNull(map, "The map must not be null."); + ||||||| 4551c3df1 + public static Map.Entry get(final Map map, final int index) { + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====1 +1:1392c + if (object instanceof Map) { +2:1470c +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1578c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====1 +1:1526c + * @param coll the collection to check +2:1605c +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1609,1626c + <<<<<<< HEAD + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ||||||| 4551c3df1 + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ======= + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====1 +1:1539c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1630c +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1559c + * @param coll the collection to check +2:1650c +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1654,1671c + 
<<<<<<< HEAD + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ||||||| 4551c3df1 + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ======= + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====1 +1:1572c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1675c +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1649,1651c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for the merge. +2:1752,1754c +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1659c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { +2:1761,1779c + <<<<<<< HEAD + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); + ||||||| 4551c3df1 + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } + ======= + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +3:1691,1692c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +==== +1:1661,1666c + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1781,1784c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1694,1696c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====1 +1:1669,1670c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +2:1787,1788c +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? 
+ Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====1 +1:1672c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +2:1790c +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1832c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1724,1727c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +2:1843,1846c +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====1 +1:1729c + * the collection c and thus cannot call c.retainAll(retain);. +2:1848c +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====1 +1:1731,1732c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +2:1850,1851c +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====1 +1:1734c + * retain that provides a fast (e.g. O(1)) implementation of +2:1853c +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====1 +1:1741,1742c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +2:1860,1861c +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. +====1 +1:1746a +2:1866,1867c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====1 +1:1752,1755c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +2:1873,1876c +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====1 +1:1757,1758c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +2:1878,1879c +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====1 +1:1762c + * in collection and retain. Hence this method is +2:1883c +3:1795c + * in {@code collection} and {@code retain}. 
Hence this method is +====1 +1:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +2:1892,1893c +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1900,1902c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1923,1925c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1930c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1952c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1845,1846c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +2:1964,1965c +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====1 +1:1861,1865c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +2:1980,1984c +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====1 +1:1867c + * the collection c and thus cannot call collection.removeAll(remove);. +2:1986c +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====1 +1:1869,1870c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +2:1988,1989c +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====1 +1:1872c + * remove that provides a fast (e.g. O(1)) implementation of +2:1991c +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====1 +1:1878,1880c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. 
+2:1997,1999c +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====1 +1:1886c + } +2:2005c +3:1917c + } +====1 +1:1889c + * Removes all elements in remove from collection. +2:2008c +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====1 +1:1891,1894c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +2:2010,2013c +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====1 +1:1896,1897c + * the collection c and thus cannot call + * collection.removeAll(remove). +2:2015,2016c +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====1 +1:1901c + * in collection and remove. Hence this method is +2:2020c +3:1932c + * in {@code collection} and {@code remove}. Hence this method is +====1 +1:1910,1911c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +2:2029,2030c +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:2037,2039c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:2082c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2100c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2121,2122c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2146,2147c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2162,2170c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_histogram/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_histogram/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..b98dcdbd96 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_histogram/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,725 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void 
testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void 
testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====1 +1:655c + assertNull(CollectionUtils.find(null,testPredicate)); +2:835c +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====1 +1:1279c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +2:1459c +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); 
+ } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====1 +1:1477c + List list = new ArrayList<>(); +2:1670c +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1697c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeRange(list, 0, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====1 +1:1501c + Collection list = new ArrayList<>(); +2:1702c +3:1694c + final Collection list = new ArrayList<>(); +==== +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1704,1710c + <<<<<<< HEAD + CollectionUtils.removeRange(list, -1, 1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, -1, 1); + ======= + final Collection result = CollectionUtils.removeRange(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====1 +1:1508c + Collection list = new ArrayList<>(); +2:1715c +3:1701c + final Collection list = new ArrayList<>(); +==== +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1717,1723c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, -1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 0, -1); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====1 +1:1515c + Collection list = new ArrayList<>(); +2:1728c +3:1708c + final Collection list = new ArrayList<>(); +==== +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1731,1737c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 1, 0); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 1, 0); + ======= + final Collection result = CollectionUtils.removeRange(list, 1, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====1 +1:1523c + Collection list = new ArrayList<>(); +2:1742c +3:1716c + final Collection list = new ArrayList<>(); +==== +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1744,1750c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, 2); + ||||||| 4551c3df1 + 
Collection result = CollectionUtils.removeRange(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====1 +1:1530c + List list = new ArrayList<>(); +2:1755c +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1784c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1786,1795c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeCount(list, 0, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1800,1809c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1814,1823c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====1 +1:1579c + Collection list = new ArrayList<>(); +2:1828c +3:1772c + final Collection list = new ArrayList<>(); +==== +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1830,1836c + <<<<<<< HEAD + CollectionUtils.removeCount(list, 0, 2); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeCount(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeCount(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1863c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1865,1876c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final 
NullPointerException ex) { + // expected + } + try { + CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1885,1896c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====1 +1:1740c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +2:2003c +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:2007,2047c + <<<<<<< HEAD + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) + ||||||| 4551c3df1 + @Test(expected=IndexOutOfBoundsException.class) + ======= + @Test(expected = IndexOutOfBoundsException.class) + >>>>>>> TEMP_RIGHT_BRANCH +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1746c + CollectionUtils.get((Object)collectionA, -3); +2:2049c +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====1 +1:1749c + @Test(expected=IndexOutOfBoundsException.class) +2:2052c +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1751c + CollectionUtils.get((Object)collectionA.iterator(), 30); +2:2054c +3:1986c + 
CollectionUtils.get((Object) collectionA.iterator(), 30); +====1 +1:1754c + @Test(expected=IllegalArgumentException.class) +2:2057c +3:1989c + @Test(expected = IllegalArgumentException.class) +====1 +1:1756c + CollectionUtils.get((Object)null, 0); +2:2059c +3:1991c + CollectionUtils.get((Object) null, 0); +====1 +1:1761,1762c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +2:2064,2065c +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====1 +1:1764c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +2:2067c +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2099,2103c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2137,2141c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_ignorespace/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_ignorespace/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..dd4b5b6b97 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_ignorespace/diff_CollectionUtils.java.txt @@ -0,0 +1,985 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====1 +1:75c + public CardinalityHelper(final Iterable a, final Iterable b) { +2:76c +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:142c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +2:143c +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:187c + * CollectionUtils should not normally be instantiated. +2:188c +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====1 +1:205c + * Returns an immutable empty collection if the argument is null, +2:206c +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====1 +1:209,210c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +2:210,211c +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====1 +1:357c + * Returns true iff all elements of {@code coll2} are also contained +2:367c +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====1 +1:361c + * In other words, this method returns true iff the +2:371c +3:371c + * In other words, this method returns {@code true} iff the +====1 +1:376c + * @return true iff the intersection of the collections has the same cardinality +2:386c +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:409c + * Returns true iff at least one element is in both collections. +2:421c +3:421c + * Returns {@code true} iff at least one element is in both collections. +====1 +1:411c + * In other words, this method returns true iff the +2:423c +3:423c + * In other words, this method returns {@code true} iff the +====1 +1:415c + * @param the type of object to lookup in coll1. +2:427c +3:427c + * @param the type of object to lookup in {@code coll1}. +====1 +1:418c + * @return true iff the intersection of the collections is non-empty +2:430c +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====1 +1:440c + * Returns true iff at least one element is in both collections. +2:454c +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====1 +1:442c + * In other words, this method returns true iff the +2:456c +3:456c + * In other words, this method returns {@code true} iff the +====1 +1:448c + * @return true iff the intersection of the collections is non-empty +2:462c +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====1 +1:503c + * @return true iff a is a sub-collection of b +2:520c +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:528c + *
  • a.size() and b.size() represent the +2:547c +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====1 +1:530c + *
  • a.size() < Integer.MAXVALUE
  • +2:549c +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====1 +1:535c + * @return true iff a is a proper sub-collection of b +2:554c +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:554c + * @return true iff the collections contain the same elements with the same cardinalities. +2:575c +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,586c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if(a.size() != b.size()) { + ======= + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====1 +1:561c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +2:590c +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====1 +1:564,565c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +2:593,594c +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====1 +1:591c + * @return true iff the collections contain the same elements with the same cardinalities. +2:620c +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,600c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } +2:627,639c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { + ======= + Objects.requireNonNull(equator, "equator"); +3:621,623c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); +==== +1:602c + if(a.size() != b.size()) { +2:641,642c + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:625c + if (a.size() != b.size()) { +====1 +1:626c + public EquatorWrapper(final Equator equator, final O object) { +2:666c +3:649c + EquatorWrapper(final Equator equator, final O object) { +====1 +1:655c + * @param coll the {@link Iterable} to search +2:695c +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:703,716c + <<<<<<< HEAD + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); + ||||||| 4551c3df1 + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + 
return IterableUtils.frequency(coll, obj); + ======= + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); + >>>>>>> TEMP_RIGHT_BRANCH +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====1 +1:799c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +2:848c +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====1 +1:856c + * A null collection or predicate matches no elements. +2:905c +3:876c + * A {@code null} collection or predicate matches no elements. +====1 +1:874c + * A null collection or predicate returns false. +2:923c +3:894c + * A {@code null} collection or predicate returns false. +====1 +1:893c + * A null predicate returns false. +2:942c +3:913c + * A {@code null} predicate returns false. +====1 +1:896c + * A null or empty collection returns true. +2:945c +3:916c + * A {@code null} or empty collection returns true. +====1 +1:916c + * A null predicate matches no elements. +2:965c +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:971a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:975,979c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:965,966c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +2:1016,1017c +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====1 +1:969,970c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +2:1020,1021c +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====1 +1:1010c + * If the input predicate is null, the result is an empty +2:1061c +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1068a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1072,1076c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:1031,1032c + * If the input predicate is null, no elements are added to + * outputCollection. +2:1084,1085c +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? 
+ new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1125,1129c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1221,1229c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1245,1246c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1263,1264c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1282,1283c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1301,1302c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====1 +1:1241,1242c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1311,1312c +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1244,1245c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +2:1314,1315c +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1328c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====1 +1:1273,1274c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1344,1345c +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1359c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====1 +1:1292,1294c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +2:1364,1366c +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====1 +1:1300,1301c + *
<li> Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +2:1372,1373c +3:1335,1336c + *
<li> Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====1 +1:1304,1305c + *
<li> Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +2:1376,1377c +3:1339,1340c + *
<li> Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====1 +1:1307c + *
<li> Collection -- the value returned is the index-th object +2:1379c +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====1 +1:1310c + * index-th object in the Iterator/Enumeration, if there +2:1382c +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====1 +1:1312c + * index (or to the end, if index exceeds the +2:1384c +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====1 +1:1327c + if (object instanceof Map) { +2:1399c +3:1362c + if (object instanceof Map) { +====1 +1:1354,1355c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +2:1426,1427c +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1430c +3:1393c + * @param the value type in the {@link Map} +==== +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1436,1442c + public static Map.Entry get(final Map map, final int index) { + <<<<<<< HEAD + Objects.requireNonNull(map, "The map must not be null."); + ||||||| 4551c3df1 + public static Map.Entry get(final Map map, final int index) { + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====1 +1:1392c + if (object instanceof Map) { +2:1470c +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1578c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====1 +1:1526c + * @param coll the collection to check +2:1605c +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1609,1626c + <<<<<<< HEAD + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ||||||| 4551c3df1 + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ======= + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====1 +1:1539c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1630c +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1559c + * @param coll the collection to check +2:1650c +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1654,1671c + 
<<<<<<< HEAD + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ||||||| 4551c3df1 + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ======= + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====1 +1:1572c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1675c +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1649,1651c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for the merge. +2:1752,1754c +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1659c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { +2:1761,1779c + <<<<<<< HEAD + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); + ||||||| 4551c3df1 + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } + ======= + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +3:1691,1692c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +==== +1:1661,1666c + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1781,1784c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1694,1696c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====1 +1:1669,1670c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +2:1787,1788c +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? 
+ Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====1 +1:1672c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +2:1790c +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1832c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1724,1727c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +2:1843,1846c +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====1 +1:1729c + * the collection c and thus cannot call c.retainAll(retain);. +2:1848c +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====1 +1:1731,1732c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +2:1850,1851c +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====1 +1:1734c + * retain that provides a fast (e.g. O(1)) implementation of +2:1853c +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====1 +1:1741,1742c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +2:1860,1861c +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. +====1 +1:1746a +2:1866,1867c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====1 +1:1752,1755c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +2:1873,1876c +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====1 +1:1757,1758c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +2:1878,1879c +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====1 +1:1762c + * in collection and retain. Hence this method is +2:1883c +3:1795c + * in {@code collection} and {@code retain}. 
Hence this method is +====1 +1:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +2:1892,1893c +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1900,1902c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1923,1925c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1930c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1952c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1845,1846c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +2:1964,1965c +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====1 +1:1861,1865c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +2:1980,1984c +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====1 +1:1867c + * the collection c and thus cannot call collection.removeAll(remove);. +2:1986c +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====1 +1:1869,1870c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +2:1988,1989c +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====1 +1:1872c + * remove that provides a fast (e.g. O(1)) implementation of +2:1991c +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====1 +1:1878,1880c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. 
+2:1997,1999c +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====3 +1:1886c +2:2005c + } +3:1917c + } +====1 +1:1889c + * Removes all elements in remove from collection. +2:2008c +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====1 +1:1891,1894c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +2:2010,2013c +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====1 +1:1896,1897c + * the collection c and thus cannot call + * collection.removeAll(remove). +2:2015,2016c +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====1 +1:1901c + * in collection and remove. Hence this method is +2:2020c +3:1932c + * in {@code collection} and {@code remove}. Hence this method is +====1 +1:1910,1911c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +2:2029,2030c +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:2037,2039c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:2082c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2100c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2121,2122c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2146,2147c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2162,2170c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_ignorespace/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_ignorespace/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..b98dcdbd96 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_ignorespace/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,725 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void 
testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void 
testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====1 +1:655c + assertNull(CollectionUtils.find(null,testPredicate)); +2:835c +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====1 +1:1279c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +2:1459c +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); 
+ } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====1 +1:1477c + List list = new ArrayList<>(); +2:1670c +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1697c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeRange(list, 0, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====1 +1:1501c + Collection list = new ArrayList<>(); +2:1702c +3:1694c + final Collection list = new ArrayList<>(); +==== +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1704,1710c + <<<<<<< HEAD + CollectionUtils.removeRange(list, -1, 1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, -1, 1); + ======= + final Collection result = CollectionUtils.removeRange(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====1 +1:1508c + Collection list = new ArrayList<>(); +2:1715c +3:1701c + final Collection list = new ArrayList<>(); +==== +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1717,1723c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, -1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 0, -1); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====1 +1:1515c + Collection list = new ArrayList<>(); +2:1728c +3:1708c + final Collection list = new ArrayList<>(); +==== +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1731,1737c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 1, 0); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 1, 0); + ======= + final Collection result = CollectionUtils.removeRange(list, 1, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====1 +1:1523c + Collection list = new ArrayList<>(); +2:1742c +3:1716c + final Collection list = new ArrayList<>(); +==== +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1744,1750c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, 2); + ||||||| 4551c3df1 + 
Collection result = CollectionUtils.removeRange(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====1 +1:1530c + List list = new ArrayList<>(); +2:1755c +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1784c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1786,1795c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeCount(list, 0, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1800,1809c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1814,1823c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====1 +1:1579c + Collection list = new ArrayList<>(); +2:1828c +3:1772c + final Collection list = new ArrayList<>(); +==== +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1830,1836c + <<<<<<< HEAD + CollectionUtils.removeCount(list, 0, 2); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeCount(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeCount(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1863c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1865,1876c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final 
NullPointerException ex) { + // expected + } + try { + CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1885,1896c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====1 +1:1740c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +2:2003c +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:2007,2047c + <<<<<<< HEAD + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) + ||||||| 4551c3df1 + @Test(expected=IndexOutOfBoundsException.class) + ======= + @Test(expected = IndexOutOfBoundsException.class) + >>>>>>> TEMP_RIGHT_BRANCH +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1746c + CollectionUtils.get((Object)collectionA, -3); +2:2049c +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====1 +1:1749c + @Test(expected=IndexOutOfBoundsException.class) +2:2052c +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1751c + CollectionUtils.get((Object)collectionA.iterator(), 30); +2:2054c +3:1986c + 
CollectionUtils.get((Object) collectionA.iterator(), 30); +====1 +1:1754c + @Test(expected=IllegalArgumentException.class) +2:2057c +3:1989c + @Test(expected = IllegalArgumentException.class) +====1 +1:1756c + CollectionUtils.get((Object)null, 0); +2:2059c +3:1991c + CollectionUtils.get((Object) null, 0); +====1 +1:1761,1762c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +2:2064,2065c +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====1 +1:1764c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +2:2067c +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2099,2103c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2137,2141c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_minimal/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_minimal/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..a61e64f7d0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_minimal/diff_CollectionUtils.java.txt @@ -0,0 +1,985 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====1 +1:75c + public CardinalityHelper(final Iterable a, final Iterable b) { +2:76c +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:142c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +2:143c +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:187c + * CollectionUtils should not normally be instantiated. +2:188c +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====1 +1:205c + * Returns an immutable empty collection if the argument is null, +2:206c +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====1 +1:209,210c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +2:210,211c +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====1 +1:357c + * Returns true iff all elements of {@code coll2} are also contained +2:367c +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====1 +1:361c + * In other words, this method returns true iff the +2:371c +3:371c + * In other words, this method returns {@code true} iff the +====1 +1:376c + * @return true iff the intersection of the collections has the same cardinality +2:386c +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:409c + * Returns true iff at least one element is in both collections. +2:421c +3:421c + * Returns {@code true} iff at least one element is in both collections. +====1 +1:411c + * In other words, this method returns true iff the +2:423c +3:423c + * In other words, this method returns {@code true} iff the +====1 +1:415c + * @param the type of object to lookup in coll1. +2:427c +3:427c + * @param the type of object to lookup in {@code coll1}. +====1 +1:418c + * @return true iff the intersection of the collections is non-empty +2:430c +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====1 +1:440c + * Returns true iff at least one element is in both collections. +2:454c +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====1 +1:442c + * In other words, this method returns true iff the +2:456c +3:456c + * In other words, this method returns {@code true} iff the +====1 +1:448c + * @return true iff the intersection of the collections is non-empty +2:462c +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====1 +1:503c + * @return true iff a is a sub-collection of b +2:520c +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:528c + *
<li>a.size() and b.size() represent the +2:547c +3:547c + *
<li>{@code a.size()} and {@code b.size()} represent the +====1 +1:530c + *
<li>a.size() < Integer.MAXVALUE
+2:549c +3:549c + *
<li>{@code a.size() < Integer.MAXVALUE}
  • +====1 +1:535c + * @return true iff a is a proper sub-collection of b +2:554c +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:554c + * @return true iff the collections contain the same elements with the same cardinalities. +2:575c +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,586c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if(a.size() != b.size()) { + ======= + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====1 +1:561c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +2:590c +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====1 +1:564,565c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +2:593,594c +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====1 +1:591c + * @return true iff the collections contain the same elements with the same cardinalities. +2:620c +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,600c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } +2:627,639c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { + ======= + Objects.requireNonNull(equator, "equator"); +3:621,623c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); +==== +1:602c + if(a.size() != b.size()) { +2:641,642c + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:625c + if (a.size() != b.size()) { +====1 +1:626c + public EquatorWrapper(final Equator equator, final O object) { +2:666c +3:649c + EquatorWrapper(final Equator equator, final O object) { +====1 +1:655c + * @param coll the {@link Iterable} to search +2:695c +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:703,716c + <<<<<<< HEAD + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); + ||||||| 4551c3df1 + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + 
return IterableUtils.frequency(coll, obj); + ======= + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); + >>>>>>> TEMP_RIGHT_BRANCH +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====1 +1:799c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +2:848c +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====1 +1:856c + * A null collection or predicate matches no elements. +2:905c +3:876c + * A {@code null} collection or predicate matches no elements. +====1 +1:874c + * A null collection or predicate returns false. +2:923c +3:894c + * A {@code null} collection or predicate returns false. +====1 +1:893c + * A null predicate returns false. +2:942c +3:913c + * A {@code null} predicate returns false. +====1 +1:896c + * A null or empty collection returns true. +2:945c +3:916c + * A {@code null} or empty collection returns true. +====1 +1:916c + * A null predicate matches no elements. +2:965c +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:971a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:975,979c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:965,966c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +2:1016,1017c +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====1 +1:969,970c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +2:1020,1021c +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====1 +1:1010c + * If the input predicate is null, the result is an empty +2:1061c +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1068a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1072,1076c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:1031,1032c + * If the input predicate is null, no elements are added to + * outputCollection. +2:1084,1085c +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? 
+ new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1125,1129c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1221,1229c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1245,1246c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1263,1264c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1282,1283c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1301,1302c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====1 +1:1241,1242c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1311,1312c +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1244,1245c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +2:1314,1315c +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1328c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====1 +1:1273,1274c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1344,1345c +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1359c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====1 +1:1292,1294c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +2:1364,1366c +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====1 +1:1300,1301c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +2:1372,1373c +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====1 +1:1304,1305c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +2:1376,1377c +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====1 +1:1307c + *
  • Collection -- the value returned is the index-th object +2:1379c +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====1 +1:1310c + * index-th object in the Iterator/Enumeration, if there +2:1382c +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====1 +1:1312c + * index (or to the end, if index exceeds the +2:1384c +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====1 +1:1327c + if (object instanceof Map) { +2:1399c +3:1362c + if (object instanceof Map) { +====1 +1:1354,1355c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +2:1426,1427c +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1430c +3:1393c + * @param the value type in the {@link Map} +==== +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1436,1442c + public static Map.Entry get(final Map map, final int index) { + <<<<<<< HEAD + Objects.requireNonNull(map, "The map must not be null."); + ||||||| 4551c3df1 + public static Map.Entry get(final Map map, final int index) { + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====1 +1:1392c + if (object instanceof Map) { +2:1470c +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1578c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====1 +1:1526c + * @param coll the collection to check +2:1605c +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1609,1626c + <<<<<<< HEAD + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ||||||| 4551c3df1 + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ======= + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====1 +1:1539c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1630c +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1559c + * @param coll the collection to check +2:1650c +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1654,1671c + 
<<<<<<< HEAD + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ||||||| 4551c3df1 + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ======= + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====1 +1:1572c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1675c +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1649,1651c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for the merge. +2:1752,1754c +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1659c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { +2:1761,1779c + <<<<<<< HEAD + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); + ||||||| 4551c3df1 + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } + ======= + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +3:1691,1692c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +==== +1:1661,1666c + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1781,1784c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1694,1696c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====1 +1:1669,1670c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +2:1787,1788c +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? 
+ Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====1 +1:1672c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +2:1790c +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1832c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1724,1727c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +2:1843,1846c +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====1 +1:1729c + * the collection c and thus cannot call c.retainAll(retain);. +2:1848c +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====1 +1:1731,1732c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +2:1850,1851c +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====1 +1:1734c + * retain that provides a fast (e.g. O(1)) implementation of +2:1853c +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====1 +1:1741,1742c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +2:1860,1861c +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. +====1 +1:1746a +2:1866,1867c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====1 +1:1752,1755c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +2:1873,1876c +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====1 +1:1757,1758c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +2:1878,1879c +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====1 +1:1762c + * in collection and retain. Hence this method is +2:1883c +3:1795c + * in {@code collection} and {@code retain}. 
Hence this method is +====1 +1:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +2:1892,1893c +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1900,1902c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1923,1925c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1930c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1952c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1845,1846c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +2:1964,1965c +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====1 +1:1861,1865c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +2:1980,1984c +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====1 +1:1867c + * the collection c and thus cannot call collection.removeAll(remove);. +2:1986c +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====1 +1:1869,1870c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +2:1988,1989c +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====1 +1:1872c + * remove that provides a fast (e.g. O(1)) implementation of +2:1991c +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====1 +1:1878,1880c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. 
+2:1997,1999c +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====1 +1:1886c + } +2:2005c +3:1917c + } +====1 +1:1889c + * Removes all elements in remove from collection. +2:2008c +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====1 +1:1891,1894c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +2:2010,2013c +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====1 +1:1896,1897c + * the collection c and thus cannot call + * collection.removeAll(remove). +2:2015,2016c +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====1 +1:1901c + * in collection and remove. Hence this method is +2:2020c +3:1932c + * in {@code collection} and {@code remove}. Hence this method is +====1 +1:1910,1911c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +2:2029,2030c +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:2037,2039c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:2082c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2100c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2121,2122c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2146,2147c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2162,2170c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_minimal/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_minimal/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..b98dcdbd96 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_minimal/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,725 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void 
testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void 
testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====1 +1:655c + assertNull(CollectionUtils.find(null,testPredicate)); +2:835c +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====1 +1:1279c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +2:1459c +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); 
+ } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====1 +1:1477c + List list = new ArrayList<>(); +2:1670c +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1697c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeRange(list, 0, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====1 +1:1501c + Collection list = new ArrayList<>(); +2:1702c +3:1694c + final Collection list = new ArrayList<>(); +==== +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1704,1710c + <<<<<<< HEAD + CollectionUtils.removeRange(list, -1, 1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, -1, 1); + ======= + final Collection result = CollectionUtils.removeRange(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====1 +1:1508c + Collection list = new ArrayList<>(); +2:1715c +3:1701c + final Collection list = new ArrayList<>(); +==== +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1717,1723c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, -1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 0, -1); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====1 +1:1515c + Collection list = new ArrayList<>(); +2:1728c +3:1708c + final Collection list = new ArrayList<>(); +==== +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1731,1737c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 1, 0); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 1, 0); + ======= + final Collection result = CollectionUtils.removeRange(list, 1, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====1 +1:1523c + Collection list = new ArrayList<>(); +2:1742c +3:1716c + final Collection list = new ArrayList<>(); +==== +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1744,1750c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, 2); + ||||||| 4551c3df1 + 
Collection result = CollectionUtils.removeRange(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====1 +1:1530c + List list = new ArrayList<>(); +2:1755c +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1784c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1786,1795c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeCount(list, 0, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1800,1809c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1814,1823c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====1 +1:1579c + Collection list = new ArrayList<>(); +2:1828c +3:1772c + final Collection list = new ArrayList<>(); +==== +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1830,1836c + <<<<<<< HEAD + CollectionUtils.removeCount(list, 0, 2); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeCount(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeCount(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1863c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1865,1876c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final 
NullPointerException ex) { + // expected + } + try { + CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1885,1896c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====1 +1:1740c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +2:2003c +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:2007,2047c + <<<<<<< HEAD + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) + ||||||| 4551c3df1 + @Test(expected=IndexOutOfBoundsException.class) + ======= + @Test(expected = IndexOutOfBoundsException.class) + >>>>>>> TEMP_RIGHT_BRANCH +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1746c + CollectionUtils.get((Object)collectionA, -3); +2:2049c +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====1 +1:1749c + @Test(expected=IndexOutOfBoundsException.class) +2:2052c +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1751c + CollectionUtils.get((Object)collectionA.iterator(), 30); +2:2054c +3:1986c + 
CollectionUtils.get((Object) collectionA.iterator(), 30); +====1 +1:1754c + @Test(expected=IllegalArgumentException.class) +2:2057c +3:1989c + @Test(expected = IllegalArgumentException.class) +====1 +1:1756c + CollectionUtils.get((Object)null, 0); +2:2059c +3:1991c + CollectionUtils.get((Object) null, 0); +====1 +1:1761,1762c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +2:2064,2065c +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====1 +1:1764c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +2:2067c +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2099,2103c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2137,2141c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_myers/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_myers/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..a61e64f7d0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_myers/diff_CollectionUtils.java.txt @@ -0,0 +1,985 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====1 +1:75c + public CardinalityHelper(final Iterable a, final Iterable b) { +2:76c +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:142c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +2:143c +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:187c + * CollectionUtils should not normally be instantiated. +2:188c +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====1 +1:205c + * Returns an immutable empty collection if the argument is null, +2:206c +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====1 +1:209,210c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +2:210,211c +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====1 +1:357c + * Returns true iff all elements of {@code coll2} are also contained +2:367c +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====1 +1:361c + * In other words, this method returns true iff the +2:371c +3:371c + * In other words, this method returns {@code true} iff the +====1 +1:376c + * @return true iff the intersection of the collections has the same cardinality +2:386c +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:409c + * Returns true iff at least one element is in both collections. +2:421c +3:421c + * Returns {@code true} iff at least one element is in both collections. +====1 +1:411c + * In other words, this method returns true iff the +2:423c +3:423c + * In other words, this method returns {@code true} iff the +====1 +1:415c + * @param the type of object to lookup in coll1. +2:427c +3:427c + * @param the type of object to lookup in {@code coll1}. +====1 +1:418c + * @return true iff the intersection of the collections is non-empty +2:430c +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====1 +1:440c + * Returns true iff at least one element is in both collections. +2:454c +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====1 +1:442c + * In other words, this method returns true iff the +2:456c +3:456c + * In other words, this method returns {@code true} iff the +====1 +1:448c + * @return true iff the intersection of the collections is non-empty +2:462c +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====1 +1:503c + * @return true iff a is a sub-collection of b +2:520c +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:528c + *
  • a.size() and b.size() represent the +2:547c +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====1 +1:530c + *
  • a.size() < Integer.MAXVALUE
  • +2:549c +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====1 +1:535c + * @return true iff a is a proper sub-collection of b +2:554c +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:554c + * @return true iff the collections contain the same elements with the same cardinalities. +2:575c +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,586c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if(a.size() != b.size()) { + ======= + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====1 +1:561c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +2:590c +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====1 +1:564,565c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +2:593,594c +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====1 +1:591c + * @return true iff the collections contain the same elements with the same cardinalities. +2:620c +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,600c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } +2:627,639c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { + ======= + Objects.requireNonNull(equator, "equator"); +3:621,623c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); +==== +1:602c + if(a.size() != b.size()) { +2:641,642c + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:625c + if (a.size() != b.size()) { +====1 +1:626c + public EquatorWrapper(final Equator equator, final O object) { +2:666c +3:649c + EquatorWrapper(final Equator equator, final O object) { +====1 +1:655c + * @param coll the {@link Iterable} to search +2:695c +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:703,716c + <<<<<<< HEAD + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); + ||||||| 4551c3df1 + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + 
return IterableUtils.frequency(coll, obj); + ======= + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); + >>>>>>> TEMP_RIGHT_BRANCH +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====1 +1:799c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +2:848c +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====1 +1:856c + * A null collection or predicate matches no elements. +2:905c +3:876c + * A {@code null} collection or predicate matches no elements. +====1 +1:874c + * A null collection or predicate returns false. +2:923c +3:894c + * A {@code null} collection or predicate returns false. +====1 +1:893c + * A null predicate returns false. +2:942c +3:913c + * A {@code null} predicate returns false. +====1 +1:896c + * A null or empty collection returns true. +2:945c +3:916c + * A {@code null} or empty collection returns true. +====1 +1:916c + * A null predicate matches no elements. +2:965c +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:971a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:975,979c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:965,966c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +2:1016,1017c +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====1 +1:969,970c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +2:1020,1021c +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====1 +1:1010c + * If the input predicate is null, the result is an empty +2:1061c +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1068a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1072,1076c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:1031,1032c + * If the input predicate is null, no elements are added to + * outputCollection. +2:1084,1085c +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? 
+ new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1125,1129c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1221,1229c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1245,1246c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1263,1264c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1282,1283c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1301,1302c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====1 +1:1241,1242c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1311,1312c +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1244,1245c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +2:1314,1315c +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1328c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====1 +1:1273,1274c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1344,1345c +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1359c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====1 +1:1292,1294c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +2:1364,1366c +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====1 +1:1300,1301c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +2:1372,1373c +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====1 +1:1304,1305c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +2:1376,1377c +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====1 +1:1307c + *
  • Collection -- the value returned is the index-th object +2:1379c +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====1 +1:1310c + * index-th object in the Iterator/Enumeration, if there +2:1382c +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====1 +1:1312c + * index (or to the end, if index exceeds the +2:1384c +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====1 +1:1327c + if (object instanceof Map) { +2:1399c +3:1362c + if (object instanceof Map) { +====1 +1:1354,1355c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +2:1426,1427c +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1430c +3:1393c + * @param the value type in the {@link Map} +==== +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1436,1442c + public static Map.Entry get(final Map map, final int index) { + <<<<<<< HEAD + Objects.requireNonNull(map, "The map must not be null."); + ||||||| 4551c3df1 + public static Map.Entry get(final Map map, final int index) { + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====1 +1:1392c + if (object instanceof Map) { +2:1470c +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1578c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====1 +1:1526c + * @param coll the collection to check +2:1605c +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1609,1626c + <<<<<<< HEAD + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ||||||| 4551c3df1 + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ======= + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====1 +1:1539c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1630c +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1559c + * @param coll the collection to check +2:1650c +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1654,1671c + 
<<<<<<< HEAD + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ||||||| 4551c3df1 + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ======= + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====1 +1:1572c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1675c +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1649,1651c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for the merge. +2:1752,1754c +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1659c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { +2:1761,1779c + <<<<<<< HEAD + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); + ||||||| 4551c3df1 + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } + ======= + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +3:1691,1692c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +==== +1:1661,1666c + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1781,1784c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1694,1696c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====1 +1:1669,1670c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +2:1787,1788c +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? 
+ Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====1 +1:1672c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +2:1790c +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1832c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1724,1727c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +2:1843,1846c +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====1 +1:1729c + * the collection c and thus cannot call c.retainAll(retain);. +2:1848c +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====1 +1:1731,1732c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +2:1850,1851c +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====1 +1:1734c + * retain that provides a fast (e.g. O(1)) implementation of +2:1853c +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====1 +1:1741,1742c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +2:1860,1861c +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. +====1 +1:1746a +2:1866,1867c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====1 +1:1752,1755c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +2:1873,1876c +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====1 +1:1757,1758c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +2:1878,1879c +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====1 +1:1762c + * in collection and retain. Hence this method is +2:1883c +3:1795c + * in {@code collection} and {@code retain}. 
Hence this method is +====1 +1:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +2:1892,1893c +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1900,1902c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1923,1925c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1930c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1952c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1845,1846c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +2:1964,1965c +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====1 +1:1861,1865c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +2:1980,1984c +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====1 +1:1867c + * the collection c and thus cannot call collection.removeAll(remove);. +2:1986c +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====1 +1:1869,1870c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +2:1988,1989c +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====1 +1:1872c + * remove that provides a fast (e.g. O(1)) implementation of +2:1991c +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====1 +1:1878,1880c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. 
+2:1997,1999c +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====1 +1:1886c + } +2:2005c +3:1917c + } +====1 +1:1889c + * Removes all elements in remove from collection. +2:2008c +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====1 +1:1891,1894c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +2:2010,2013c +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====1 +1:1896,1897c + * the collection c and thus cannot call + * collection.removeAll(remove). +2:2015,2016c +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====1 +1:1901c + * in collection and remove. Hence this method is +2:2020c +3:1932c + * in {@code collection} and {@code remove}. Hence this method is +====1 +1:1910,1911c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +2:2029,2030c +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:2037,2039c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:2082c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2100c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2121,2122c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2146,2147c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2162,2170c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_myers/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_myers/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..b98dcdbd96 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_myers/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,725 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void 
testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void 
testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====1 +1:655c + assertNull(CollectionUtils.find(null,testPredicate)); +2:835c +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====1 +1:1279c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +2:1459c +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); 
+ } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====1 +1:1477c + List list = new ArrayList<>(); +2:1670c +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1697c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeRange(list, 0, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====1 +1:1501c + Collection list = new ArrayList<>(); +2:1702c +3:1694c + final Collection list = new ArrayList<>(); +==== +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1704,1710c + <<<<<<< HEAD + CollectionUtils.removeRange(list, -1, 1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, -1, 1); + ======= + final Collection result = CollectionUtils.removeRange(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====1 +1:1508c + Collection list = new ArrayList<>(); +2:1715c +3:1701c + final Collection list = new ArrayList<>(); +==== +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1717,1723c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, -1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 0, -1); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====1 +1:1515c + Collection list = new ArrayList<>(); +2:1728c +3:1708c + final Collection list = new ArrayList<>(); +==== +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1731,1737c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 1, 0); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 1, 0); + ======= + final Collection result = CollectionUtils.removeRange(list, 1, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====1 +1:1523c + Collection list = new ArrayList<>(); +2:1742c +3:1716c + final Collection list = new ArrayList<>(); +==== +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1744,1750c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, 2); + ||||||| 4551c3df1 + 
Collection result = CollectionUtils.removeRange(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====1 +1:1530c + List list = new ArrayList<>(); +2:1755c +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1784c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1786,1795c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeCount(list, 0, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1800,1809c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1814,1823c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====1 +1:1579c + Collection list = new ArrayList<>(); +2:1828c +3:1772c + final Collection list = new ArrayList<>(); +==== +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1830,1836c + <<<<<<< HEAD + CollectionUtils.removeCount(list, 0, 2); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeCount(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeCount(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1863c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1865,1876c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final 
NullPointerException ex) { + // expected + } + try { + CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1885,1896c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====1 +1:1740c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +2:2003c +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:2007,2047c + <<<<<<< HEAD + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) + ||||||| 4551c3df1 + @Test(expected=IndexOutOfBoundsException.class) + ======= + @Test(expected = IndexOutOfBoundsException.class) + >>>>>>> TEMP_RIGHT_BRANCH +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1746c + CollectionUtils.get((Object)collectionA, -3); +2:2049c +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====1 +1:1749c + @Test(expected=IndexOutOfBoundsException.class) +2:2052c +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1751c + CollectionUtils.get((Object)collectionA.iterator(), 30); +2:2054c +3:1986c + 
CollectionUtils.get((Object) collectionA.iterator(), 30); +====1 +1:1754c + @Test(expected=IllegalArgumentException.class) +2:2057c +3:1989c + @Test(expected = IllegalArgumentException.class) +====1 +1:1756c + CollectionUtils.get((Object)null, 0); +2:2059c +3:1991c + CollectionUtils.get((Object) null, 0); +====1 +1:1761,1762c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +2:2064,2065c +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====1 +1:1764c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +2:2067c +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2099,2103c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2137,2141c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_patience/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_patience/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..a61e64f7d0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_patience/diff_CollectionUtils.java.txt @@ -0,0 +1,985 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====1 +1:75c + public CardinalityHelper(final Iterable a, final Iterable b) { +2:76c +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:142c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +2:143c +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:187c + * CollectionUtils should not normally be instantiated. +2:188c +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====1 +1:205c + * Returns an immutable empty collection if the argument is null, +2:206c +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====1 +1:209,210c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +2:210,211c +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====1 +1:357c + * Returns true iff all elements of {@code coll2} are also contained +2:367c +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====1 +1:361c + * In other words, this method returns true iff the +2:371c +3:371c + * In other words, this method returns {@code true} iff the +====1 +1:376c + * @return true iff the intersection of the collections has the same cardinality +2:386c +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:409c + * Returns true iff at least one element is in both collections. +2:421c +3:421c + * Returns {@code true} iff at least one element is in both collections. +====1 +1:411c + * In other words, this method returns true iff the +2:423c +3:423c + * In other words, this method returns {@code true} iff the +====1 +1:415c + * @param the type of object to lookup in coll1. +2:427c +3:427c + * @param the type of object to lookup in {@code coll1}. +====1 +1:418c + * @return true iff the intersection of the collections is non-empty +2:430c +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====1 +1:440c + * Returns true iff at least one element is in both collections. +2:454c +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====1 +1:442c + * In other words, this method returns true iff the +2:456c +3:456c + * In other words, this method returns {@code true} iff the +====1 +1:448c + * @return true iff the intersection of the collections is non-empty +2:462c +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====1 +1:503c + * @return true iff a is a sub-collection of b +2:520c +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:528c + *
  • a.size() and b.size() represent the +2:547c +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====1 +1:530c + *
  • a.size() < Integer.MAXVALUE
  • +2:549c +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====1 +1:535c + * @return true iff a is a proper sub-collection of b +2:554c +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:554c + * @return true iff the collections contain the same elements with the same cardinalities. +2:575c +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,586c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if(a.size() != b.size()) { + ======= + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====1 +1:561c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +2:590c +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====1 +1:564,565c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +2:593,594c +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====1 +1:591c + * @return true iff the collections contain the same elements with the same cardinalities. +2:620c +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,600c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } +2:627,639c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { + ======= + Objects.requireNonNull(equator, "equator"); +3:621,623c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); +==== +1:602c + if(a.size() != b.size()) { +2:641,642c + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:625c + if (a.size() != b.size()) { +====1 +1:626c + public EquatorWrapper(final Equator equator, final O object) { +2:666c +3:649c + EquatorWrapper(final Equator equator, final O object) { +====1 +1:655c + * @param coll the {@link Iterable} to search +2:695c +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:703,716c + <<<<<<< HEAD + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); + ||||||| 4551c3df1 + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + 
return IterableUtils.frequency(coll, obj); + ======= + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); + >>>>>>> TEMP_RIGHT_BRANCH +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====1 +1:799c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +2:848c +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====1 +1:856c + * A null collection or predicate matches no elements. +2:905c +3:876c + * A {@code null} collection or predicate matches no elements. +====1 +1:874c + * A null collection or predicate returns false. +2:923c +3:894c + * A {@code null} collection or predicate returns false. +====1 +1:893c + * A null predicate returns false. +2:942c +3:913c + * A {@code null} predicate returns false. +====1 +1:896c + * A null or empty collection returns true. +2:945c +3:916c + * A {@code null} or empty collection returns true. +====1 +1:916c + * A null predicate matches no elements. +2:965c +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:971a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:975,979c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:965,966c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +2:1016,1017c +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====1 +1:969,970c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +2:1020,1021c +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====1 +1:1010c + * If the input predicate is null, the result is an empty +2:1061c +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1068a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1072,1076c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:1031,1032c + * If the input predicate is null, no elements are added to + * outputCollection. +2:1084,1085c +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? 
+ new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1125,1129c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1221,1229c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1245,1246c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1263,1264c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1282,1283c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1301,1302c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====1 +1:1241,1242c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1311,1312c +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1244,1245c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +2:1314,1315c +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1328c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====1 +1:1273,1274c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1344,1345c +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1359c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====1 +1:1292,1294c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +2:1364,1366c +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====1 +1:1300,1301c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +2:1372,1373c +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====1 +1:1304,1305c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +2:1376,1377c +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====1 +1:1307c + *
  • Collection -- the value returned is the index-th object +2:1379c +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====1 +1:1310c + * index-th object in the Iterator/Enumeration, if there +2:1382c +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====1 +1:1312c + * index (or to the end, if index exceeds the +2:1384c +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====1 +1:1327c + if (object instanceof Map) { +2:1399c +3:1362c + if (object instanceof Map) { +====1 +1:1354,1355c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +2:1426,1427c +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1430c +3:1393c + * @param the value type in the {@link Map} +==== +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1436,1442c + public static Map.Entry get(final Map map, final int index) { + <<<<<<< HEAD + Objects.requireNonNull(map, "The map must not be null."); + ||||||| 4551c3df1 + public static Map.Entry get(final Map map, final int index) { + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====1 +1:1392c + if (object instanceof Map) { +2:1470c +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1578c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====1 +1:1526c + * @param coll the collection to check +2:1605c +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1609,1626c + <<<<<<< HEAD + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ||||||| 4551c3df1 + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ======= + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====1 +1:1539c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1630c +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1559c + * @param coll the collection to check +2:1650c +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1654,1671c + 
<<<<<<< HEAD + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ||||||| 4551c3df1 + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ======= + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====1 +1:1572c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1675c +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1649,1651c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for the merge. +2:1752,1754c +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1659c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { +2:1761,1779c + <<<<<<< HEAD + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); + ||||||| 4551c3df1 + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } + ======= + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +3:1691,1692c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +==== +1:1661,1666c + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1781,1784c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1694,1696c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====1 +1:1669,1670c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +2:1787,1788c +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? 
+ Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====1 +1:1672c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +2:1790c +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1832c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1724,1727c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +2:1843,1846c +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====1 +1:1729c + * the collection c and thus cannot call c.retainAll(retain);. +2:1848c +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====1 +1:1731,1732c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +2:1850,1851c +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====1 +1:1734c + * retain that provides a fast (e.g. O(1)) implementation of +2:1853c +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====1 +1:1741,1742c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +2:1860,1861c +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. +====1 +1:1746a +2:1866,1867c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====1 +1:1752,1755c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +2:1873,1876c +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====1 +1:1757,1758c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +2:1878,1879c +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====1 +1:1762c + * in collection and retain. Hence this method is +2:1883c +3:1795c + * in {@code collection} and {@code retain}. 
Hence this method is +====1 +1:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +2:1892,1893c +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1900,1902c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1923,1925c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1930c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1952c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1845,1846c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +2:1964,1965c +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====1 +1:1861,1865c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +2:1980,1984c +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====1 +1:1867c + * the collection c and thus cannot call collection.removeAll(remove);. +2:1986c +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====1 +1:1869,1870c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +2:1988,1989c +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====1 +1:1872c + * remove that provides a fast (e.g. O(1)) implementation of +2:1991c +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====1 +1:1878,1880c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. 
+2:1997,1999c +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====1 +1:1886c + } +2:2005c +3:1917c + } +====1 +1:1889c + * Removes all elements in remove from collection. +2:2008c +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====1 +1:1891,1894c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +2:2010,2013c +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====1 +1:1896,1897c + * the collection c and thus cannot call + * collection.removeAll(remove). +2:2015,2016c +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====1 +1:1901c + * in collection and remove. Hence this method is +2:2020c +3:1932c + * in {@code collection} and {@code remove}. Hence this method is +====1 +1:1910,1911c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +2:2029,2030c +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:2037,2039c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:2082c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2100c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2121,2122c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2146,2147c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2162,2170c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_patience/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_patience/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..b98dcdbd96 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/gitmerge_recursive_patience/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,725 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void 
testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void 
testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====1 +1:655c + assertNull(CollectionUtils.find(null,testPredicate)); +2:835c +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====1 +1:1279c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +2:1459c +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); 
+ } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====1 +1:1477c + List list = new ArrayList<>(); +2:1670c +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1697c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeRange(list, 0, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====1 +1:1501c + Collection list = new ArrayList<>(); +2:1702c +3:1694c + final Collection list = new ArrayList<>(); +==== +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1704,1710c + <<<<<<< HEAD + CollectionUtils.removeRange(list, -1, 1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, -1, 1); + ======= + final Collection result = CollectionUtils.removeRange(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====1 +1:1508c + Collection list = new ArrayList<>(); +2:1715c +3:1701c + final Collection list = new ArrayList<>(); +==== +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1717,1723c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, -1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 0, -1); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====1 +1:1515c + Collection list = new ArrayList<>(); +2:1728c +3:1708c + final Collection list = new ArrayList<>(); +==== +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1731,1737c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 1, 0); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 1, 0); + ======= + final Collection result = CollectionUtils.removeRange(list, 1, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====1 +1:1523c + Collection list = new ArrayList<>(); +2:1742c +3:1716c + final Collection list = new ArrayList<>(); +==== +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1744,1750c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, 2); + ||||||| 4551c3df1 + 
Collection result = CollectionUtils.removeRange(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====1 +1:1530c + List list = new ArrayList<>(); +2:1755c +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1784c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1786,1795c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeCount(list, 0, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1800,1809c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1814,1823c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====1 +1:1579c + Collection list = new ArrayList<>(); +2:1828c +3:1772c + final Collection list = new ArrayList<>(); +==== +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1830,1836c + <<<<<<< HEAD + CollectionUtils.removeCount(list, 0, 2); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeCount(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeCount(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1863c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1865,1876c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final 
NullPointerException ex) { + // expected + } + try { + CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1885,1896c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====1 +1:1740c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +2:2003c +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:2007,2047c + <<<<<<< HEAD + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) + ||||||| 4551c3df1 + @Test(expected=IndexOutOfBoundsException.class) + ======= + @Test(expected = IndexOutOfBoundsException.class) + >>>>>>> TEMP_RIGHT_BRANCH +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1746c + CollectionUtils.get((Object)collectionA, -3); +2:2049c +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====1 +1:1749c + @Test(expected=IndexOutOfBoundsException.class) +2:2052c +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1751c + CollectionUtils.get((Object)collectionA.iterator(), 30); +2:2054c +3:1986c + 
CollectionUtils.get((Object) collectionA.iterator(), 30); +====1 +1:1754c + @Test(expected=IllegalArgumentException.class) +2:2057c +3:1989c + @Test(expected = IllegalArgumentException.class) +====1 +1:1756c + CollectionUtils.get((Object)null, 0); +2:2059c +3:1991c + CollectionUtils.get((Object) null, 0); +====1 +1:1761,1762c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +2:2064,2065c +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====1 +1:1764c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +2:2067c +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2099,2103c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2137,2141c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/intellimerge/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/intellimerge/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..0cbbb6daca --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/intellimerge/diff_CollectionUtils.java.txt @@ -0,0 +1,986 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====1 +1:75c + public CardinalityHelper(final Iterable a, final Iterable b) { +2:76c +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:142c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +2:143c +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====1 +1:187c + * CollectionUtils should not normally be instantiated. +2:188c +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====1 +1:205c + * Returns an immutable empty collection if the argument is null, +2:206c +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====1 +1:209,210c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +2:210,211c +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====1 +1:357c + * Returns true iff all elements of {@code coll2} are also contained +2:367c +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====1 +1:361c + * In other words, this method returns true iff the +2:371c +3:371c + * In other words, this method returns {@code true} iff the +====1 +1:376c + * @return true iff the intersection of the collections has the same cardinality +2:386c +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:409c + * Returns true iff at least one element is in both collections. +2:421c +3:421c + * Returns {@code true} iff at least one element is in both collections. +====1 +1:411c + * In other words, this method returns true iff the +2:423c +3:423c + * In other words, this method returns {@code true} iff the +====1 +1:415c + * @param the type of object to lookup in coll1. +2:427c +3:427c + * @param the type of object to lookup in {@code coll1}. +====1 +1:418c + * @return true iff the intersection of the collections is non-empty +2:430c +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====1 +1:440c + * Returns true iff at least one element is in both collections. +2:454c +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====1 +1:442c + * In other words, this method returns true iff the +2:456c +3:456c + * In other words, this method returns {@code true} iff the +====1 +1:448c + * @return true iff the intersection of the collections is non-empty +2:462c +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====1 +1:503c + * @return true iff a is a sub-collection of b +2:520c +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:528c + *
  • a.size() and b.size() represent the +2:547c +3:547c + *
  • {@code a.size()} and {@code b.size()} represent the +====1 +1:530c + *
  • a.size() < Integer.MAXVALUE
  • +2:549c +3:549c + *
  • {@code a.size() < Integer.MAXVALUE}
  • +====1 +1:535c + * @return true iff a is a proper sub-collection of b +2:554c +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:554c + * @return true iff the collections contain the same elements with the same cardinalities. +2:575c +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,586c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if(a.size() != b.size()) { + ======= + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====1 +1:561c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +2:590c +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====1 +1:564,565c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +2:593,594c +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====1 +1:591c + * @return true iff the collections contain the same elements with the same cardinalities. +2:620c +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,600c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } +2:627,639c + <<<<<<< HEAD + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { + ||||||| 4551c3df1 + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { + ======= + Objects.requireNonNull(equator, "equator"); +3:621,623c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); +==== +1:602c + if(a.size() != b.size()) { +2:641,642c + if (a.size() != b.size()) { + >>>>>>> TEMP_RIGHT_BRANCH +3:625c + if (a.size() != b.size()) { +====1 +1:626c + public EquatorWrapper(final Equator equator, final O object) { +2:666c +3:649c + EquatorWrapper(final Equator equator, final O object) { +====1 +1:655c + * @param coll the {@link Iterable} to search +2:695c +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:703,716c + <<<<<<< HEAD + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); + ||||||| 4551c3df1 + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + 
return IterableUtils.frequency(coll, obj); + ======= + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); + >>>>>>> TEMP_RIGHT_BRANCH +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====1 +1:799c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +2:848c +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====1 +1:856c + * A null collection or predicate matches no elements. +2:905c +3:876c + * A {@code null} collection or predicate matches no elements. +====1 +1:874c + * A null collection or predicate returns false. +2:923c +3:894c + * A {@code null} collection or predicate returns false. +====1 +1:893c + * A null predicate returns false. +2:942c +3:913c + * A {@code null} predicate returns false. +====1 +1:896c + * A null or empty collection returns true. +2:945c +3:916c + * A {@code null} or empty collection returns true. +====1 +1:916c + * A null predicate matches no elements. +2:965c +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:971a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:975,979c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:965,966c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +2:1016,1017c +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====1 +1:969,970c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +2:1020,1021c +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====1 +1:1010c + * If the input predicate is null, the result is an empty +2:1061c +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1068a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1072,1076c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====1 +1:1031,1032c + * If the input predicate is null, no elements are added to + * outputCollection. +2:1084,1085c +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? 
+ new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1125,1129c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1221,1229c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1245,1246c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1263,1264c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1282,1283c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1301,1302c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====1 +1:1241,1242c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1311,1312c +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1244,1245c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +2:1314,1315c +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1328c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====1 +1:1273,1274c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +2:1344,1345c +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1359c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====1 +1:1292,1294c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +2:1364,1366c +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====1 +1:1300,1301c + *
  • Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +2:1372,1373c +3:1335,1336c + *
  • Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====1 +1:1304,1305c + *
  • Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +2:1376,1377c +3:1339,1340c + *
  • Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====1 +1:1307c + *
  • Collection -- the value returned is the index-th object +2:1379c +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====1 +1:1310c + * index-th object in the Iterator/Enumeration, if there +2:1382c +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====1 +1:1312c + * index (or to the end, if index exceeds the +2:1384c +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====1 +1:1327c + if (object instanceof Map) { +2:1399c +3:1362c + if (object instanceof Map) { +====1 +1:1354,1355c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +2:1426,1427c +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1430c +3:1393c + * @param the value type in the {@link Map} +==== +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1436,1443c + <<<<<<< HEAD + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); + ||||||| 4551c3df1 + public static Map.Entry get(final Map map, final int index) { + ======= + public static Map.Entry get(final Map map, final int index) { + >>>>>>> TEMP_RIGHT_BRANCH +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====1 +1:1392c + if (object instanceof Map) { +2:1471c +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1579c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====1 +1:1526c + * @param coll the collection to check +2:1606c +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1610,1627c + <<<<<<< HEAD + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ||||||| 4551c3df1 + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); + ======= + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====1 +1:1539c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1631c +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1559c + * @param coll the collection to check +2:1651c +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + 
return ((BoundedCollection) coll).maxSize(); +2:1655,1672c + <<<<<<< HEAD + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ||||||| 4551c3df1 + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); + ======= + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); + >>>>>>> TEMP_RIGHT_BRANCH +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====1 +1:1572c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +2:1676c +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====1 +1:1649,1651c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for the merge. +2:1753,1755c +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1659c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { +2:1762,1780c + <<<<<<< HEAD + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); + ||||||| 4551c3df1 + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } + ======= + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +3:1691,1692c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { +==== +1:1661,1666c + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1782,1785c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); + >>>>>>> TEMP_RIGHT_BRANCH +3:1694,1696c + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====1 +1:1669,1670c + final int totalSize = a instanceof Collection && b instanceof Collection ? 
+ Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +2:1788,1789c +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? + Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====1 +1:1672c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +2:1791c +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1833c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1724,1727c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +2:1844,1847c +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====1 +1:1729c + * the collection c and thus cannot call c.retainAll(retain);. +2:1849c +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====1 +1:1731,1732c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +2:1851,1852c +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====1 +1:1734c + * retain that provides a fast (e.g. O(1)) implementation of +2:1854c +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====1 +1:1741,1742c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +2:1861,1862c +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. +====1 +1:1746a +2:1867,1868c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====1 +1:1752,1755c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +2:1874,1877c +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====1 +1:1757,1758c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +2:1879,1880c +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====1 +1:1762c + * in collection and retain. Hence this method is +2:1884c +3:1795c + * in {@code collection} and {@code retain}. 
Hence this method is +====1 +1:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +2:1893,1894c +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1901,1903c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1924,1926c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1931c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1953c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1845,1846c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +2:1965,1966c +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====1 +1:1861,1865c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +2:1981,1985c +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====1 +1:1867c + * the collection c and thus cannot call collection.removeAll(remove);. +2:1987c +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====1 +1:1869,1870c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +2:1989,1990c +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====1 +1:1872c + * remove that provides a fast (e.g. O(1)) implementation of +2:1992c +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====1 +1:1878,1880c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. 
+2:1998,2000c +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====1 +1:1886c + } +2:2006c +3:1917c + } +====1 +1:1889c + * Removes all elements in remove from collection. +2:2009c +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====1 +1:1891,1894c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +2:2011,2014c +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====1 +1:1896,1897c + * the collection c and thus cannot call + * collection.removeAll(remove). +2:2016,2017c +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====1 +1:1901c + * in collection and remove. Hence this method is +2:2021c +3:1932c + * in {@code collection} and {@code remove}. Hence this method is +====1 +1:1910,1911c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +2:2030,2031c +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:2038,2040c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:2083c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2101c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2122,2123c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2147,2148c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2163,2171c + <<<<<<< HEAD + Objects.requireNonNull(collection, "The collection must not be null."); + ||||||| 4551c3df1 + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } + ======= + Objects.requireNonNull(collection, "collection"); + >>>>>>> TEMP_RIGHT_BRANCH +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/intellimerge/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/intellimerge/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..b98dcdbd96 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/intellimerge/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,725 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void testGetCardinalityMapNull() { + 
CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl2() { + final 
Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====1 +1:655c + assertNull(CollectionUtils.find(null,testPredicate)); +2:835c +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====1 +1:1279c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +2:1459c +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); + } + +====1 +1:1385,1389c + try { + 
CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====1 +1:1477c + List list = new ArrayList<>(); +2:1670c +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1697c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeRange(list, 0, 0); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeRange(list, 0, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====1 +1:1501c + Collection list = new ArrayList<>(); +2:1702c +3:1694c + final Collection list = new ArrayList<>(); +==== +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1704,1710c + <<<<<<< HEAD + CollectionUtils.removeRange(list, -1, 1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, -1, 1); + ======= + final Collection result = CollectionUtils.removeRange(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====1 +1:1508c + Collection list = new ArrayList<>(); +2:1715c +3:1701c + final Collection list = new ArrayList<>(); +==== +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1717,1723c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, -1); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 0, -1); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====1 +1:1515c + Collection list = new ArrayList<>(); +2:1728c +3:1708c + final Collection list = new ArrayList<>(); +==== +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1731,1737c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 1, 0); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeRange(list, 1, 0); + ======= + final Collection result = CollectionUtils.removeRange(list, 1, 0); + >>>>>>> TEMP_RIGHT_BRANCH +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====1 +1:1523c + Collection list = new ArrayList<>(); +2:1742c +3:1716c + final Collection list = new ArrayList<>(); +==== +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1744,1750c + <<<<<<< HEAD + CollectionUtils.removeRange(list, 0, 2); + ||||||| 4551c3df1 + Collection result = 
CollectionUtils.removeRange(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeRange(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====1 +1:1530c + List list = new ArrayList<>(); +2:1755c +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1784c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1786,1795c + <<<<<<< HEAD + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); + ||||||| 4551c3df1 + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); + ======= + final Collection list = null; + final Collection result = CollectionUtils.removeCount(list, 0, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1800,1809c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, -1, 1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1814,1823c + <<<<<<< HEAD + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); + ||||||| 4551c3df1 + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); + ======= + final Collection list = new ArrayList<>(); + final Collection result = CollectionUtils.removeCount(list, 0, -1); + >>>>>>> TEMP_RIGHT_BRANCH +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====1 +1:1579c + Collection list = new ArrayList<>(); +2:1828c +3:1772c + final Collection list = new ArrayList<>(); +==== +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1830,1836c + <<<<<<< HEAD + CollectionUtils.removeCount(list, 0, 2); + ||||||| 4551c3df1 + Collection result = CollectionUtils.removeCount(list, 0, 2); + ======= + final Collection result = CollectionUtils.removeCount(list, 0, 2); + >>>>>>> TEMP_RIGHT_BRANCH +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1863c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1865,1876c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final NullPointerException ex) { + // 
expected + } + try { + CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1885,1896c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====1 +1:1740c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +2:2003c +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:2007,2047c + <<<<<<< HEAD + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) + ||||||| 4551c3df1 + @Test(expected=IndexOutOfBoundsException.class) + ======= + @Test(expected = IndexOutOfBoundsException.class) + >>>>>>> TEMP_RIGHT_BRANCH +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1746c + CollectionUtils.get((Object)collectionA, -3); +2:2049c +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====1 +1:1749c + @Test(expected=IndexOutOfBoundsException.class) +2:2052c +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====1 +1:1751c + CollectionUtils.get((Object)collectionA.iterator(), 30); +2:2054c +3:1986c + CollectionUtils.get((Object) collectionA.iterator(), 
30); +====1 +1:1754c + @Test(expected=IllegalArgumentException.class) +2:2057c +3:1989c + @Test(expected = IllegalArgumentException.class) +====1 +1:1756c + CollectionUtils.get((Object)null, 0); +2:2059c +3:1991c + CollectionUtils.get((Object) null, 0); +====1 +1:1761,1762c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +2:2064,2065c +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====1 +1:1764c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +2:2067c +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2099,2103c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2137,2141c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/184/spork/diff_CollectionUtils.java.txt b/src/python/merge_conflict_analysis_diffs/184/spork/diff_CollectionUtils.java.txt new file mode 100644 index 0000000000..9187319c0a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/spork/diff_CollectionUtils.java.txt @@ -0,0 +1,884 @@ +====1 +1:30a +2:31c +3:31c + import java.util.Objects; +====3 +1:75c +2:76c + public CardinalityHelper(final Iterable a, final Iterable b) { +3:76c + CardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:142c +2:143c + public SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +3:143c + SetOperationCardinalityHelper(final Iterable a, final Iterable b) { +====3 +1:187c +2:188c + * CollectionUtils should not normally be instantiated. +3:188c + * {@code CollectionUtils} should not normally be instantiated. 
+====3 +1:205c +2:206c + * Returns an immutable empty collection if the argument is null, +3:206c + * Returns an immutable empty collection if the argument is {@code null}, +====3 +1:209,210c +2:210,211c + * @param collection the collection, possibly null + * @return an empty collection if the argument is null +3:210,211c + * @param collection the collection, possibly {@code null} + * @return an empty collection if the argument is {@code null} +====1 +1:232a +2:234,235c +3:234,235c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:257a +2:261,262c +3:261,262c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:287a +2:293,294c +3:293,294c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====1 +1:340a +2:348,350c +3:348,350c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(p, "The predicate must not be null."); +====3 +1:357c +2:367c + * Returns true iff all elements of {@code coll2} are also contained +3:367c + * Returns {@code true} iff all elements of {@code coll2} are also contained +====3 +1:361c +2:371c + * In other words, this method returns true iff the +3:371c + * In other words, this method returns {@code true} iff the +====3 +1:376c +2:386c + * @return true iff the intersection of the collections has the same cardinality +3:386c + * @return {@code true} iff the intersection of the collections has the same cardinality +====1 +1:380a +2:391,392c +3:391,392c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====3 +1:409c +2:421c + * Returns true iff at least one element is in both collections. +3:421c + * Returns {@code true} iff at least one element is in both collections. +====3 +1:411c +2:423c + * In other words, this method returns true iff the +3:423c + * In other words, this method returns {@code true} iff the +====3 +1:415c +2:427c + * @param the type of object to lookup in coll1. +3:427c + * @param the type of object to lookup in {@code coll1}. +====3 +1:418c +2:430c + * @return true iff the intersection of the collections is non-empty +3:430c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:422a +2:435,436c +3:435,436c + Objects.requireNonNull(coll1, "The collection must not be null."); + Objects.requireNonNull(coll2, "The elements must not be null."); +====3 +1:440c +2:454c + * Returns true iff at least one element is in both collections. +3:454c + * Returns {@code true} iff at least one element is in both collections. 
+====3 +1:442c +2:456c + * In other words, this method returns true iff the +3:456c + * In other words, this method returns {@code true} iff the +====3 +1:448c +2:462c + * @return true iff the intersection of the collections is non-empty +3:462c + * @return {@code true} iff the intersection of the collections is non-empty +====1 +1:452a +2:467,468c +3:467,468c + Objects.requireNonNull(coll1, "The first collection must not be null."); + Objects.requireNonNull(coll2, "The second collection must not be null."); +====1 +1:482a +2:499c +3:499c + Objects.requireNonNull(coll, "The collection must not be null."); +====3 +1:503c +2:520c + * @return true iff a is a sub-collection of b +3:520c + * @return {@code true} iff a is a sub-collection of b +====1 +1:507a +2:525,526c +3:525,526c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:528c +2:547c + *
   <li>a.size() and b.size() represent the +3:547c + *
   <li>{@code a.size()} and {@code b.size()} represent the +====3 +1:530c +2:549c + *
   <li>a.size() < Integer.MAXVALUE
+3:549c + *
   <li>{@code a.size() < Integer.MAXVALUE}
  • +====3 +1:535c +2:554c + * @return true iff a is a proper sub-collection of b +3:554c + * @return {@code true} iff a is a proper sub-collection of b +====1 +1:539a +2:559,560c +3:559,560c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); +====3 +1:554c +2:575c + * @return true iff the collections contain the same elements with the same cardinalities. +3:575c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:557c + if(a.size() != b.size()) { +2:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if(a.size() != b.size()) { +3:578,580c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + if (a.size() != b.size()) { +====3 +1:561c +2:584c + if(helper.cardinalityA.size() != helper.cardinalityB.size()) { +3:584c + if (helper.cardinalityA.size() != helper.cardinalityB.size()) { +====3 +1:564,565c +2:587,588c + for( final Object obj : helper.cardinalityA.keySet()) { + if(helper.freqA(obj) != helper.freqB(obj)) { +3:587,588c + for (final Object obj : helper.cardinalityA.keySet()) { + if (helper.freqA(obj) != helper.freqB(obj)) { +====3 +1:591c +2:614c + * @return true iff the collections contain the same elements with the same cardinalities. +3:614c + * @return {@code true} iff the collections contain the same elements with the same cardinalities. +==== +1:598,602c + if (equator == null) { + throw new NullPointerException("Equator must not be null."); + } + + if(a.size() != b.size()) { +2:621,624c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); + if(a.size() != b.size()) { +3:621,625c + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(equator, "equator"); + + if (a.size() != b.size()) { +====3 +1:626c +2:648c + public EquatorWrapper(final Equator equator, final O object) { +3:649c + EquatorWrapper(final Equator equator, final O object) { +====3 +1:655c +2:677c + * @param coll the {@link Iterable} to search +3:678c + * @param collection the {@link Iterable} to search +==== +1:663,667c + public static int cardinality(final O obj, final Iterable coll) { + if (coll == null) { + throw new NullPointerException("coll must not be null."); + } + return IterableUtils.frequency(coll, obj); +2:685,687c + public static int cardinality(final O obj, final Iterable coll) { + Objects.requireNonNull(coll, "The collection must not be null."); + return IterableUtils.frequency(coll, obj); +3:686,687c + public static int cardinality(final O obj, final Iterable collection) { + return IterableUtils.frequency(Objects.requireNonNull(collection, "collection"), obj); +====3 +1:799c +2:819c + * This is equivalent to filter(collection, PredicateUtils.notPredicate(predicate)) +3:819c + * This is equivalent to {@code filter(collection, PredicateUtils.notPredicate(predicate))} +====3 +1:856c +2:876c + * A null collection or predicate matches no elements. +3:876c + * A {@code null} collection or predicate matches no elements. +====3 +1:874c +2:894c + * A null collection or predicate returns false. 
+3:894c + * A {@code null} collection or predicate returns false. +====3 +1:893c +2:913c + * A null predicate returns false. +3:913c + * A {@code null} predicate returns false. +====3 +1:896c +2:916c + * A null or empty collection returns true. +3:916c + * A {@code null} or empty collection returns true. +====3 +1:916c +2:936c + * A null predicate matches no elements. +3:936c + * A {@code null} predicate matches no elements. +====1 +1:923c + * @throws NullPointerException if the input collection is null +2:942a +3:942a +====1 +1:927,928c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:946,950c +3:946,950c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:965,966c +2:987,988c + * Elements matching the predicate are added to the outputCollection, + * all other elements are added to the rejectedCollection. +3:987,988c + * Elements matching the predicate are added to the {@code outputCollection}, + * all other elements are added to the {@code rejectedCollection}. +====3 +1:969,970c +2:991,992c + * If the input predicate is null, no elements are added to + * outputCollection or rejectedCollection. +3:991,992c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection} or {@code rejectedCollection}. +====3 +1:1010c +2:1032c + * If the input predicate is null, the result is an empty +3:1032c + * If the input predicate is {@code null}, the result is an empty +====1 +1:1018c + * @throws NullPointerException if the input collection is null +2:1039a +3:1039a +====1 +1:1022,1023c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1043,1047c +3:1043,1047c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? new ArrayList<>() : new ArrayList<>(size); +====3 +1:1031,1032c +2:1055,1056c + * If the input predicate is null, no elements are added to + * outputCollection. +3:1055,1056c + * If the input predicate is {@code null}, no elements are added to + * {@code outputCollection}. +====1 +1:1072,1073c + final Collection answer = inputCollection instanceof Collection ? + new ArrayList<>(((Collection) inputCollection).size()) : new ArrayList<>(); +2:1096,1100c +3:1096,1100c + int size = 0; + if (null != inputCollection) { + size = inputCollection instanceof Collection ? ((Collection) inputCollection).size() : 0; + } + final Collection answer = size == 0 ? 
new ArrayList<>() : new ArrayList<>(size); +==== +1:1165,1167c + if (collection == null) { + throw new NullPointerException("The collection must not be null"); + } +2:1192c + Objects.requireNonNull(collection, "The collection must not be null."); +3:1192c + Objects.requireNonNull(collection, "collection"); +====1 +1:1182a +2:1208,1209c +3:1208,1209c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterable, "The iterable of elements to add must not be null."); +====1 +1:1198a +2:1226,1227c +3:1226,1227c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(iterator, "The iterator of elements to add must not be null."); +====1 +1:1215a +2:1245,1246c +3:1245,1246c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(enumeration, "The enumeration of elements to add must not be null."); +====1 +1:1232a +2:1264,1265c +3:1264,1265c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(elements, "The array of elements to add must not be null."); +====3 +1:1241,1242c +2:1274,1275c + * Returns the index-th value in {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1274,1275c + * Returns the {@code index}-th value in {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====3 +1:1244,1245c +2:1277,1278c + * The Iterator is advanced to index (or to the end, if + * index exceeds the number of entries) as a side effect of this method. +3:1277,1278c + * The Iterator is advanced to {@code index} (or to the end, if + * {@code index} exceeds the number of entries) as a side effect of this method. +====1 +1:1257a +2:1291c +3:1291c + Objects.requireNonNull(iterator, "The iterator must not be null."); +====3 +1:1273,1274c +2:1307,1308c + * Returns the index-th value in the iterable's {@link Iterator}, throwing + * IndexOutOfBoundsException if there is no such element. +3:1307,1308c + * Returns the {@code index}-th value in the {@code iterable}'s {@link Iterator}, throwing + * {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1287a +2:1322c +3:1322c + Objects.requireNonNull(iterable, "The iterable must not be null."); +====3 +1:1292,1294c +2:1327,1329c + * Returns the index-th value in object, throwing + * IndexOutOfBoundsException if there is no such element or + * IllegalArgumentException if object is not an +3:1327,1329c + * Returns the {@code index}-th value in {@code object}, throwing + * {@code IndexOutOfBoundsException} if there is no such element or + * {@code IllegalArgumentException} if {@code object} is not an +====3 +1:1300,1301c +2:1335,1336c + *
 <li> Map -- the value returned is the Map.Entry in position + * index in the map's entrySet iterator, +3:1335,1336c + *
 <li> Map -- the value returned is the {@code Map.Entry} in position + * {@code index} in the map's {@code entrySet} iterator, +====3 +1:1304,1305c +2:1339,1340c + *
 <li> Array -- the index-th array entry is returned, + * if there is such an entry; otherwise an IndexOutOfBoundsException +3:1339,1340c + *
 <li> Array -- the {@code index}-th array entry is returned, + * if there is such an entry; otherwise an {@code IndexOutOfBoundsException} +====3 +1:1307c +2:1342c + *
 <li> Collection -- the value returned is the index-th object +3:1342c + *
  • Collection -- the value returned is the {@code index}-th object +====3 +1:1310c +2:1345c + * index-th object in the Iterator/Enumeration, if there +3:1345c + * {@code index}-th object in the Iterator/Enumeration, if there +====3 +1:1312c +2:1347c + * index (or to the end, if index exceeds the +3:1347c + * {@code index} (or to the end, if {@code index} exceeds the +====3 +1:1327c +2:1362c + if (object instanceof Map) { +3:1362c + if (object instanceof Map) { +====3 +1:1354,1355c +2:1389,1390c + * Returns the index-th Map.Entry in the map's entrySet, + * throwing IndexOutOfBoundsException if there is no such element. +3:1389,1390c + * Returns the {@code index}-th {@code Map.Entry} in the {@code map}'s {@code entrySet}, + * throwing {@code IndexOutOfBoundsException} if there is no such element. +====1 +1:1358c + * @param the key type in the {@link Map} +2:1393c +3:1393c + * @param the value type in the {@link Map} +====1 +1:1364c + public static Map.Entry get(final Map map, final int index) { +2:1399,1400c +3:1399,1400c + public static Map.Entry get(final Map map, final int index) { + Objects.requireNonNull(map, "The map must not be null."); +====3 +1:1392c +2:1428c + if (object instanceof Map) { +3:1428c + if (object instanceof Map) { +====1 +1:1499a +2:1536c +3:1536c + Objects.requireNonNull(array, "The array must not be null."); +====3 +1:1526c +2:1563c + * @param coll the collection to check +3:1563c + * @param collection the collection to check +==== +1:1530,1535c + public static boolean isFull(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +2:1567,1570c + public static boolean isFull(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).isFull(); +3:1567,1570c + public static boolean isFull(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).isFull(); +====3 +1:1539c +2:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1574c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1559c +2:1594c + * @param coll the collection to check +3:1594c + * @param collection the collection to check +==== +1:1563,1568c + public static int maxSize(final Collection coll) { + if (coll == null) { + throw new NullPointerException("The collection must not be null"); + } + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +2:1598,1601c + public static int maxSize(final Collection coll) { + Objects.requireNonNull(coll, "The collection must not be null"); + if (coll instanceof BoundedCollection) { + return ((BoundedCollection) coll).maxSize(); +3:1598,1601c + public static int maxSize(final Collection collection) { + Objects.requireNonNull(collection, "collection"); + if (collection instanceof BoundedCollection) { + return ((BoundedCollection) collection).maxSize(); +====3 +1:1572c +2:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(coll); +3:1605c + UnmodifiableBoundedCollection.unmodifiableBoundedCollection(collection); +====3 +1:1649,1651c +2:1682,1684c + * @param a the first collection, must not be null + * @param b the second collection, must not be null + * @param c the comparator to use for 
the merge. +3:1682,1684c + * @param iterableA the first collection, must not be null + * @param iterableB the second collection, must not be null + * @param comparator the comparator to use for the merge. +==== +1:1658,1666c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + + if (a == null || b == null) { + throw new NullPointerException("The collections must not be null"); + } + if (c == null) { + throw new NullPointerException("The comparator must not be null"); + } +2:1691,1695c + public static List collate(final Iterable a, final Iterable b, + final Comparator c, final boolean includeDuplicates) { + Objects.requireNonNull(a, "The first collection must not be null."); + Objects.requireNonNull(b, "The second collection must not be null."); + Objects.requireNonNull(c, "The comparator must not be null."); +3:1691,1696c + public static List collate(final Iterable iterableA, final Iterable iterableB, + final Comparator comparator, final boolean includeDuplicates) { + + Objects.requireNonNull(iterableA, "iterableA"); + Objects.requireNonNull(iterableB, "iterableB"); + Objects.requireNonNull(comparator, "comparator"); +====3 +1:1669,1670c +2:1698,1699c + final int totalSize = a instanceof Collection && b instanceof Collection ? + Math.max(1, ((Collection) a).size() + ((Collection) b).size()) : 10; +3:1699,1700c + final int totalSize = iterableA instanceof Collection && iterableB instanceof Collection ? + Math.max(1, ((Collection) iterableA).size() + ((Collection) iterableB).size()) : 10; +====3 +1:1672c +2:1701c + final Iterator iterator = new CollatingIterator<>(c, a.iterator(), b.iterator()); +3:1702c + final Iterator iterator = new CollatingIterator<>(comparator, iterableA.iterator(), iterableB.iterator()); +====1 +1:1713a +2:1743c +3:1744c + Objects.requireNonNull(collection, "The collection must not be null."); +====3 +1:1724,1727c +2:1754,1757c + * Returns a collection containing all the elements in collection + * that are also in retain. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless retain does not contain e, in which +3:1755,1758c + * Returns a collection containing all the elements in {@code collection} + * that are also in {@code retain}. The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code retain} does not contain {@code e}, in which +====3 +1:1729c +2:1759c + * the collection c and thus cannot call c.retainAll(retain);. +3:1760c + * the collection {@code c} and thus cannot call {@code c.retainAll(retain);}. +====3 +1:1731,1732c +2:1761,1762c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in retain. If it's contained, it's added +3:1762,1763c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code retain}. If it's contained, it's added +====3 +1:1734c +2:1764c + * retain that provides a fast (e.g. O(1)) implementation of +3:1765c + * {@code retain} that provides a fast (e.g. O(1)) implementation of +====3 +1:1741,1742c +2:1771,1772c + * @return a Collection containing all the elements of collection + * that occur at least once in retain. +3:1772,1773c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain}. 
+====1 +1:1746a +2:1777,1778c +3:1778,1779c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); +====3 +1:1752,1755c +2:1784,1787c + * collection that are also in retain. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless retain does not contain e, in which case +3:1785,1788c + * {@code collection} that are also in {@code retain}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code retain} does not contain {@code e}, in which case +====3 +1:1757,1758c +2:1789,1790c + * modify the collection c and thus cannot call + * c.retainAll(retain);. +3:1790,1791c + * modify the collection {@code c} and thus cannot call + * {@code c.retainAll(retain);}. +====3 +1:1762c +2:1794c + * in collection and retain. Hence this method is +3:1795c + * in {@code collection} and {@code retain}. Hence this method is +====3 +1:1771,1772c +2:1803,1804c + * @return a Collection containing all the elements of collection + * that occur at least once in retain according to the equator +3:1804,1805c + * @return a {@code Collection} containing all the elements of {@code collection} + * that occur at least once in {@code retain} according to the {@code equator} +====1 +1:1779c + +2:1811,1813c +3:1812,1814c + Objects.requireNonNull(collection, "The first collection must not be null."); + Objects.requireNonNull(retain, "The second collection must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1800,1802c + * @param input the collection will be operated, can't be null + * @param startIndex the start index (inclusive) to remove element, can't be less than 0 + * @param endIndex the end index (exclusive) to remove, can't be less than startIndex +2:1834,1836c +3:1835,1837c + * @param input the collection will be operated, must not be null + * @param startIndex the start index (inclusive) to remove element, must not be less than 0 + * @param endIndex the end index (exclusive) to remove, must not be less than startIndex +====1 +1:1807,1809c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1841c +3:1842c + Objects.requireNonNull(input, "The collection must not be null."); +====1 +1:1831,1833c + if (null == input) { + throw new IllegalArgumentException("The collection can't be null."); + } +2:1863c +3:1864c + Objects.requireNonNull(input, "The collection must not be null."); +====3 +1:1845,1846c +2:1875,1876c + Collection result = new ArrayList(count); + Iterator iterator = input.iterator(); +3:1876,1877c + final Collection result = new ArrayList<>(count); + final Iterator iterator = input.iterator(); +====3 +1:1861,1865c +2:1891,1895c + * Removes the elements in remove from collection. That is, this + * method returns a collection containing all the elements in c + * that are not in remove. The cardinality of an element e + * in the returned collection is the same as the cardinality of e + * in collection unless remove contains e, in which +3:1892,1896c + * Removes the elements in {@code remove} from {@code collection}. That is, this + * method returns a collection containing all the elements in {@code c} + * that are not in {@code remove}. 
The cardinality of an element {@code e} + * in the returned collection is the same as the cardinality of {@code e} + * in {@code collection} unless {@code remove} contains {@code e}, in which +====3 +1:1867c +2:1897c + * the collection c and thus cannot call collection.removeAll(remove);. +3:1898c + * the collection {@code c} and thus cannot call {@code collection.removeAll(remove);}. +====3 +1:1869,1870c +2:1899,1900c + * This implementation iterates over collection, checking each element in + * turn to see if it's contained in remove. If it's not contained, it's added +3:1900,1901c + * This implementation iterates over {@code collection}, checking each element in + * turn to see if it's contained in {@code remove}. If it's not contained, it's added +====3 +1:1872c +2:1902c + * remove that provides a fast (e.g. O(1)) implementation of +3:1903c + * {@code remove} that provides a fast (e.g. O(1)) implementation of +====3 +1:1878,1880c +2:1908,1910c + * @param remove the items to be removed from the returned collection + * @return a Collection containing all the elements of collection except + * any elements that also occur in remove. +3:1909,1911c + * @param remove the items to be removed from the returned {@code collection} + * @return a {@code Collection} containing all the elements of {@code collection} except + * any elements that also occur in {@code remove}. +====3 +1:1886c +2:1916c + } +3:1917c + } +====3 +1:1889c +2:1919c + * Removes all elements in remove from collection. +3:1920c + * Removes all elements in {@code remove} from {@code collection}. +====3 +1:1891,1894c +2:1921,1924c + * collection that are not in remove. The + * cardinality of an element e in the returned collection is + * the same as the cardinality of e in collection + * unless remove contains e, in which case the +3:1922,1925c + * {@code collection} that are not in {@code remove}. The + * cardinality of an element {@code e} in the returned collection is + * the same as the cardinality of {@code e} in {@code collection} + * unless {@code remove} contains {@code e}, in which case the +====3 +1:1896,1897c +2:1926,1927c + * the collection c and thus cannot call + * collection.removeAll(remove). +3:1927,1928c + * the collection {@code c} and thus cannot call + * {@code collection.removeAll(remove)}. +====3 +1:1901c +2:1931c + * in collection and remove. Hence this method is +3:1932c + * in {@code collection} and {@code remove}. 
Hence this method is +====3 +1:1910,1911c +2:1940,1941c + * @return a Collection containing all the elements of collection + * except any element that if equal according to the equator +3:1941,1942c + * @return a {@code Collection} containing all the elements of {@code collection} + * except any element that if equal according to the {@code equator} +====1 +1:1918c + +2:1948,1950c +3:1949,1951c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(remove, "The items to be removed must not be null."); + Objects.requireNonNull(equator, "The equator must not be null."); +====1 +1:1960a +2:1993c +3:1994c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1977a +2:2011c +3:2012c + Objects.requireNonNull(collection, "The collection must not be null."); +====1 +1:1997a +2:2032,2033c +3:2033,2034c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(predicate, "The predicate must not be null."); +====1 +1:2020a +2:2057,2058c +3:2058,2059c + Objects.requireNonNull(collection, "The collection must not be null."); + Objects.requireNonNull(transformer, "The transformer must not be null."); +==== +1:2035,2037c + if (collection == null) { + throw new NullPointerException("Collection must not be null."); + } +2:2073c + Objects.requireNonNull(collection, "The collection must not be null."); +3:2074c + Objects.requireNonNull(collection, "collection"); diff --git a/src/python/merge_conflict_analysis_diffs/184/spork/diff_CollectionUtilsTest.java.txt b/src/python/merge_conflict_analysis_diffs/184/spork/diff_CollectionUtilsTest.java.txt new file mode 100644 index 0000000000..27920f0ad8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/184/spork/diff_CollectionUtilsTest.java.txt @@ -0,0 +1,652 @@ +====1 +1:193a +2:194,198c +3:194,198c + @Test(expected = NullPointerException.class) + public void testGetCardinalityMapNull() { + CollectionUtils.getCardinalityMap(null); + } + +====1 +1:349a +2:355,377c +3:355,377c + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.containsAny(null, list); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyNullColl3() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:383a +2:412,433c +3:412,433c + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl1() { + final String[] oneArr = {"1"}; + CollectionUtils.containsAny(null, oneArr); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final Collection list2 = null; + CollectionUtils.containsAny(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testContainsAnyInArrayNullArray() { + final Collection list = new ArrayList<>(1); + list.add("1"); + final String[] array = null; + CollectionUtils.containsAny(list, array); + } + +====1 +1:402a +2:453,466c +3:453,466c + @Test(expected = NullPointerException.class) + public void testUnionNullColl1() 
{ + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(null, list); + } + + @Test(expected = NullPointerException.class) + public void testUnionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.union(list, null); + } + +====1 +1:421a +2:486,499c +3:486,499c + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIntersectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.intersection(list, null); + } + +====1 +1:440a +2:519,532c +3:519,532c + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(null, list); + } + + @Test(expected = NullPointerException.class) + public void testDisjunctionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.disjunction(list, null); + } + +====1 +1:475a +2:568,581c +3:568,581c + @Test(expected = NullPointerException.class) + public void testSubtractNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(null, list); + } + + @Test(expected = NullPointerException.class) + public void testSubtractNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.subtract(list, null); + } + +====1 +1:541a +2:648,661c +3:648,661c + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isSubCollection(list, null); + } + +====1 +1:623a +2:744,789c +3:744,789c + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(null, list, e); + } + + @Test(expected = NullPointerException.class) + public void testIsEqualCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add(1); + + final Equator e = new Equator() { + @Override + public boolean equate(final Integer o1, final Integer o2) { + if (o1.intValue() % 2 == 0 ^ o2.intValue() % 2 == 0) { + return false; + } + return true; + } + + @Override + public int hash(final Integer o) { + return o.intValue() % 2 == 0 ? 
Integer.valueOf(0).hashCode() : Integer.valueOf(1).hashCode(); + } + }; + + CollectionUtils.isEqualCollection(list, null, e); + } + +====1 +1:645a +2:812,825c +3:812,825c + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl1() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(null, list); + } + + @Test(expected = NullPointerException.class) + public void testIsProperSubCollectionNullColl2() { + final Collection list = new ArrayList<>(1); + list.add("1"); + CollectionUtils.isProperSubCollection(list, null); + } + +====3 +1:655c +2:835c + assertNull(CollectionUtils.find(null,testPredicate)); +3:835c + assertNull(CollectionUtils.find(null, testPredicate)); +====3 +1:1279c +2:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long)input).intValue()); +3:1459c + Transformer TRANSFORM_TO_INTEGER = input -> Integer.valueOf(((Long) input).intValue()); +====1 +1:1331a +2:1512,1516c +3:1512,1516c + @Test(expected = NullPointerException.class) + public void testAddIgnoreNullNullColl() { + CollectionUtils.addIgnoreNull(null, "1"); + } + +====1 +1:1338,1349c + try { + CollectionUtils.predicatedCollection(new ArrayList(), null); + fail("Expecting NullPointerException for null predicate."); + } catch (final NullPointerException ex) { + // expected + } + try { + CollectionUtils.predicatedCollection(null, predicate); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1523,1534c +3:1523,1534c + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullColl() { + final Predicate predicate = PredicateUtils.instanceofPredicate(Integer.class); + CollectionUtils.predicatedCollection(null, predicate); + } + + @Test(expected = NullPointerException.class) + public void testPredicatedCollectionNullPredicate() { + final Collection list = new ArrayList<>(); + CollectionUtils.predicatedCollection(list, null); +====1 +1:1358,1362c + try { + CollectionUtils.isFull(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1542a +3:1542a +====1 +1:1372a +2:1553,1557c +3:1553,1557c + @Test(expected = NullPointerException.class) + public void testIsFullNullColl() { + CollectionUtils.isFull(null); + } + +====1 +1:1385,1389c + try { + CollectionUtils.maxSize(null); + fail(); + } catch (final NullPointerException ex) { + } +2:1569a +3:1569a +====1 +1:1399a +2:1580,1584c +3:1580,1584c + @Test(expected = NullPointerException.class) + public void testMaxSizeNullColl() { + CollectionUtils.maxSize(null); + } + +====1 +1:1466a +2:1652c +3:1652c + } +====1 +1:1468,1472c + try { + CollectionUtils.retainAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1654,1665c +3:1654,1665c + @Test(expected = NullPointerException.class) + public void testRetainAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.retainAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRetainAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.retainAll(base, null); +====3 +1:1477c +2:1670c + List list = new ArrayList<>(); +3:1670c + final List list = new ArrayList<>(); +====1 +1:1493c + @Test(expected=IllegalArgumentException.class) +2:1686c +3:1686c + @Test(expected=NullPointerException.class) +==== +1:1495,1496c + Collection list = 
null; + Collection result = CollectionUtils.removeRange(list, 0, 0); +2:1688,1689c + Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +3:1688,1689c + final Collection list = null; + CollectionUtils.removeRange(list, 0, 0); +====3 +1:1501c +2:1694c + Collection list = new ArrayList<>(); +3:1694c + final Collection list = new ArrayList<>(); +====1 +1:1503c + Collection result = CollectionUtils.removeRange(list, -1, 1); +2:1696c +3:1696c + CollectionUtils.removeRange(list, -1, 1); +====3 +1:1508c +2:1701c + Collection list = new ArrayList<>(); +3:1701c + final Collection list = new ArrayList<>(); +====1 +1:1510c + Collection result = CollectionUtils.removeRange(list, 0, -1); +2:1703c +3:1703c + CollectionUtils.removeRange(list, 0, -1); +====3 +1:1515c +2:1708c + Collection list = new ArrayList<>(); +3:1708c + final Collection list = new ArrayList<>(); +====1 +1:1518c + Collection result = CollectionUtils.removeRange(list, 1, 0); +2:1711c +3:1711c + CollectionUtils.removeRange(list, 1, 0); +====3 +1:1523c +2:1716c + Collection list = new ArrayList<>(); +3:1716c + final Collection list = new ArrayList<>(); +====1 +1:1525c + Collection result = CollectionUtils.removeRange(list, 0, 2); +2:1718c +3:1718c + CollectionUtils.removeRange(list, 0, 2); +====3 +1:1530c +2:1723c + List list = new ArrayList<>(); +3:1723c + final List list = new ArrayList<>(); +====1 +1:1559c + @Test(expected=IllegalArgumentException.class) +2:1752c +3:1752c + @Test(expected=NullPointerException.class) +==== +1:1561,1562c + Collection list = null; + Collection result = CollectionUtils.removeCount(list, 0, 1); +2:1754,1755c + Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +3:1754,1755c + final Collection list = null; + CollectionUtils.removeCount(list, 0, 1); +==== +1:1567,1568c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, -1, 1); +2:1760,1761c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +3:1760,1761c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, -1, 1); +==== +1:1573,1574c + Collection list = new ArrayList<>(); + Collection result = CollectionUtils.removeCount(list, 0, -1); +2:1766,1767c + Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +3:1766,1767c + final Collection list = new ArrayList<>(); + CollectionUtils.removeCount(list, 0, -1); +====3 +1:1579c +2:1772c + Collection list = new ArrayList<>(); +3:1772c + final Collection list = new ArrayList<>(); +====1 +1:1581c + Collection result = CollectionUtils.removeCount(list, 0, 2); +2:1774c +3:1774c + CollectionUtils.removeCount(list, 0, 2); +====1 +1:1607a +2:1801c +3:1801c + } +====1 +1:1609,1613c + try { + CollectionUtils.removeAll(null, null); + fail("expecting NullPointerException"); + } catch (final NullPointerException npe) { + } // this is what we want +2:1803,1814c +3:1803,1814c + @Test(expected = NullPointerException.class) + public void testRemoveAllNullBaseColl() { + final List sub = new ArrayList<>(); + sub.add("A"); + CollectionUtils.removeAll(null, sub); + } + + @Test(expected = NullPointerException.class) + public void testRemoveAllNullSubColl() { + final List base = new ArrayList<>(); + base.add("A"); + CollectionUtils.removeAll(base, null); +====1 +1:1622,1633c + try { + CollectionUtils.transformingCollection(new ArrayList<>(), null); + fail("Expecting NullPointerException for null transformer."); + } catch (final NullPointerException ex) { + // expected + } + try { 
+ CollectionUtils.transformingCollection(null, transformer); + fail("Expecting NullPointerException for null collection."); + } catch (final NullPointerException ex) { + // expected + } +2:1823,1834c +3:1823,1834c + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullColl() { + final Transformer transformer = TransformerUtils.nopTransformer(); + CollectionUtils.transformingCollection(null, transformer); + } + + @Test(expected = NullPointerException.class) + public void testTransformingCollectionNullTransformer() { + final List list = new ArrayList<>(); + CollectionUtils.transformingCollection(list, null); +====3 +1:1740c +2:1941c + CollectionUtils.addAll(collectionA, new Integer[]{5}); +3:1941c + CollectionUtils.addAll(collectionA, 5); +==== +1:1744c + @Test(expected=IndexOutOfBoundsException.class) +2:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected=IndexOutOfBoundsException.class) +3:1945,1979c + @Test(expected = NullPointerException.class) + public void testaddAllNullColl1() { + final List list = new ArrayList<>(); + CollectionUtils.addAll(null, list); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl2() { + final List list = new ArrayList<>(); + final Iterable list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl3() { + final List list = new ArrayList<>(); + final Iterator list2 = null; + CollectionUtils.addAll(list, list2); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl4() { + final List list = new ArrayList<>(); + final Enumeration enumArray = null; + CollectionUtils.addAll(list, enumArray); + } + + @Test(expected = NullPointerException.class) + public void testAddAllNullColl5() { + final List list = new ArrayList<>(); + final Integer[] array = null; + CollectionUtils.addAll(list, array); + } + + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1746c +2:1981c + CollectionUtils.get((Object)collectionA, -3); +3:1981c + CollectionUtils.get((Object) collectionA, -3); +====3 +1:1749c +2:1984c + @Test(expected=IndexOutOfBoundsException.class) +3:1984c + @Test(expected = IndexOutOfBoundsException.class) +====3 +1:1751c +2:1986c + CollectionUtils.get((Object)collectionA.iterator(), 30); +3:1986c + CollectionUtils.get((Object) collectionA.iterator(), 30); +====3 +1:1754c +2:1989c + @Test(expected=IllegalArgumentException.class) +3:1989c + @Test(expected = IllegalArgumentException.class) +====3 +1:1756c +2:1991c + 
CollectionUtils.get((Object)null, 0); +3:1991c + CollectionUtils.get((Object) null, 0); +====3 +1:1761,1762c +2:1996,1997c + assertEquals(2, CollectionUtils.get((Object)collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object)collectionA.iterator(), 2)); +3:1996,1997c + assertEquals(2, CollectionUtils.get((Object) collectionA, 2)); + assertEquals(2, CollectionUtils.get((Object) collectionA.iterator(), 2)); +====3 +1:1764c +2:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object)map, 0)); +3:1999c + assertEquals(map.entrySet().iterator().next(), CollectionUtils.get((Object) map, 0)); +====1 +1:1795a +2:2031,2035c +3:2031,2035c + @Test(expected = NullPointerException.class) + public void testReverseArrayNull() { + CollectionUtils.reverseArray(null); + } + +====1 +1:1828a +2:2069,2073c +3:2069,2073c + public void collateException0() { + CollectionUtils.collate(null, collectionC); + } + + @Test(expected=NullPointerException.class) diff --git a/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_README.md.txt new file mode 100644 index 0000000000..209f764bb2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_README.md.txt @@ -0,0 +1,21 @@ +====1 +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21c +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:153c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:155c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:157c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_index.html.txt new file mode 100644 index 0000000000..d32d62bf78 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_index.html.txt @@ -0,0 +1,790 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + + +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +====1 +1:238a +2:296,305c +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:310,313c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:317c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:319c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:324c +3:324c + +====1 +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:329,336c +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +====1 +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:340,347c +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:353,356c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:360c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:362c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:367c +3:367c + +====1 +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:372,379c +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +====1 +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:383,390c +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:396,399c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:403c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:405c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:410c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:412a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:417,420c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:424c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:426c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:438,440c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:445c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:447c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:459,461c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:466c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:468c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:478a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:480,483c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:487,492c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:497c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:502c +3:502c +
    "aeiou"
    +====1 +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:506c +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:512,514c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:518a +3:518a +====1 +1:442c + +2:523c +3:523c +
    +====1 +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:525a +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:530,533c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:537c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:539c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:544c +3:544c + +====1 +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:549,558c +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +====1 +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:560,582c +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:588,591c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:595c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:597c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:738c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:820,832c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_pom.mustache.txt new file mode 100644 index 0000000000..980e2155c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_pom.mustache.txt @@ -0,0 +1,29 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +====1 +1:132c + 1.5.0-M2 +2:151c +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..39777c50de --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,6 @@ +====1 +1:132c + 1.5.0-M2 +2:132c +3:132c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_README.md.txt new file mode 100644 index 0000000000..6e7838f2c6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_README.md.txt @@ -0,0 +1,28 @@ +==== +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21,27c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ||||||| c04f947a85 + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ======= + 2.1.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + >>>>>>> TEMP_RIGHT_BRANCH +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:159c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:161c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:163c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_index.html.txt new file mode 100644 index 0000000000..5d2de91649 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_index.html.txt @@ -0,0 +1,862 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +==== +1:238a +2:296,311c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    + <<<<<<< HEAD +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + ||||||| c04f947a85 +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    + ======= +
    not implemented com.wordnik.swagger.models.properties.MapProperty@787b217
    + >>>>>>> TEMP_RIGHT_BRANCH + +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:316,319c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:323c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:325c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:330c +3:324c + +==== +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:335,348c + <<<<<<< HEAD +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.931+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +==== +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:352,365c + <<<<<<< HEAD +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    + ||||||| c04f947a85 +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + ======= +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.934Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:371,374c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:378c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:380c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:385c +3:367c + +==== +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:390,403c + <<<<<<< HEAD +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.935+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +==== +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:407,420c + <<<<<<< HEAD +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    + ||||||| c04f947a85 +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    + ======= +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.935Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:426,429c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:433c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:435c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:440c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:442a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:447,450c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:454c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:456c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:468,470c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:475c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:477c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:489,491c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:496c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:498c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:508a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:510,513c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:517,522c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:527c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:532c +3:502c +
    "aeiou"
    +====1 +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:536c +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:542,544c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:548a +3:518a +====1 +1:442c + +2:553c +3:523c +
    +====1 +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:555a +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:560,563c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:567c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:569c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:574c +3:544c + +====1 +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:579,588c +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +====1 +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:590,612c +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:618,621c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:625c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:627c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:768c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:850,862c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_pom.mustache.txt new file mode 100644 index 0000000000..2accbb8523 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_pom.mustache.txt @@ -0,0 +1,36 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +==== +1:132c + 1.5.0-M2 +2:151,157c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..4b3be7e200 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:132c + 1.5.0-M2 +2:132,138c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:132c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_README.md.txt new file mode 100644 index 0000000000..209f764bb2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_README.md.txt @@ -0,0 +1,21 @@ +====1 +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21c +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:153c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:155c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:157c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_index.html.txt new file mode 100644 index 0000000000..d32d62bf78 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_index.html.txt @@ -0,0 +1,790 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +====1 +1:238a +2:296,305c +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:310,313c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:317c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:319c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:324c +3:324c + +====1 +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:329,336c +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +====1 +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:340,347c +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:353,356c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:360c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:362c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:367c +3:367c + +====1 +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:372,379c +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +====1 +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:383,390c +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:396,399c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:403c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:405c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:410c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:412a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:417,420c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:424c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:426c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:438,440c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:445c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:447c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:459,461c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:466c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:468c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:478a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:480,483c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:487,492c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:497c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:502c +3:502c +
    "aeiou"
    +====1 +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:506c +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:512,514c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:518a +3:518a +====1 +1:442c + +2:523c +3:523c +
    +====1 +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:525a +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:530,533c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:537c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:539c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:544c +3:544c + +====1 +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:549,558c +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +====1 +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:560,582c +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:588,591c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:595c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:597c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:738c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:820,832c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_pom.mustache.txt new file mode 100644 index 0000000000..980e2155c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_pom.mustache.txt @@ -0,0 +1,29 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +====1 +1:132c + 1.5.0-M2 +2:151c +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..39777c50de --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,6 @@ +====1 +1:132c + 1.5.0-M2 +2:132c +3:132c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_README.md.txt new file mode 100644 index 0000000000..6e7838f2c6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_README.md.txt @@ -0,0 +1,28 @@ +==== +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21,27c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ||||||| c04f947a85 + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ======= + 2.1.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + >>>>>>> TEMP_RIGHT_BRANCH +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:159c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:161c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:163c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_index.html.txt new file mode 100644 index 0000000000..5d2de91649 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_index.html.txt @@ -0,0 +1,862 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +==== +1:238a +2:296,311c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    + <<<<<<< HEAD +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + ||||||| c04f947a85 +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    + ======= +
    not implemented com.wordnik.swagger.models.properties.MapProperty@787b217
    + >>>>>>> TEMP_RIGHT_BRANCH + +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:316,319c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:323c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:325c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:330c +3:324c + +==== +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:335,348c + <<<<<<< HEAD +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.931+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +==== +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:352,365c + <<<<<<< HEAD +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    + ||||||| c04f947a85 +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + ======= +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.934Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:371,374c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:378c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:380c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:385c +3:367c + +==== +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:390,403c + <<<<<<< HEAD +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.935+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +==== +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:407,420c + <<<<<<< HEAD +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    + ||||||| c04f947a85 +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    + ======= +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.935Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:426,429c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:433c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:435c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:440c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:442a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:447,450c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:454c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:456c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:468,470c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:475c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:477c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:489,491c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:496c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:498c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:508a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:510,513c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:517,522c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:527c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:532c +3:502c +
    "aeiou"
    +====1 +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:536c +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:542,544c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:548a +3:518a +====1 +1:442c + +2:553c +3:523c +
    +====1 +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:555a +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:560,563c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:567c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:569c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:574c +3:544c + +====1 +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:579,588c +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +====1 +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:590,612c +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:618,621c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:625c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:627c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:768c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:850,862c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_pom.mustache.txt new file mode 100644 index 0000000000..2accbb8523 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_pom.mustache.txt @@ -0,0 +1,36 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +==== +1:132c + 1.5.0-M2 +2:151,157c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..4b3be7e200 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:132c + 1.5.0-M2 +2:132,138c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:132c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_README.md.txt new file mode 100644 index 0000000000..209f764bb2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_README.md.txt @@ -0,0 +1,21 @@ +====1 +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21c +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:153c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:155c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:157c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_index.html.txt new file mode 100644 index 0000000000..d32d62bf78 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_index.html.txt @@ -0,0 +1,790 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +====1 +1:238a +2:296,305c +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:310,313c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:317c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:319c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:324c +3:324c + +====1 +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:329,336c +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +====1 +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:340,347c +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:353,356c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:360c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:362c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:367c +3:367c + +====1 +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:372,379c +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +====1 +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:383,390c +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:396,399c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:403c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:405c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:410c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:412a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:417,420c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:424c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:426c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:438,440c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:445c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:447c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:459,461c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:466c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:468c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:478a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:480,483c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:487,492c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:497c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:502c +3:502c +
    "aeiou"
    +====1 +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:506c +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:512,514c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:518a +3:518a +====1 +1:442c + +2:523c +3:523c +
    +====1 +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:525a +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:530,533c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:537c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:539c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:544c +3:544c + +====1 +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:549,558c +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +====1 +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:560,582c +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:588,591c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:595c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:597c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:738c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:820,832c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_pom.mustache.txt new file mode 100644 index 0000000000..980e2155c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_pom.mustache.txt @@ -0,0 +1,29 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +====1 +1:132c + 1.5.0-M2 +2:151c +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..39777c50de --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,6 @@ +====1 +1:132c + 1.5.0-M2 +2:132c +3:132c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_README.md.txt new file mode 100644 index 0000000000..209f764bb2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_README.md.txt @@ -0,0 +1,21 @@ +====1 +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21c +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:153c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:155c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:157c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_index.html.txt new file mode 100644 index 0000000000..d32d62bf78 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_index.html.txt @@ -0,0 +1,790 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +====1 +1:238a +2:296,305c +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:310,313c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:317c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:319c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:324c +3:324c + +====1 +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:329,336c +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +====1 +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:340,347c +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:353,356c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:360c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:362c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:367c +3:367c + +====1 +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:372,379c +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +====1 +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:383,390c +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:396,399c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:403c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:405c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:410c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:412a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:417,420c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:424c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:426c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:438,440c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:445c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:447c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:459,461c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:466c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:468c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:478a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:480,483c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:487,492c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:497c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:502c +3:502c +
    "aeiou"
    +====1 +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:506c +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:512,514c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:518a +3:518a +====1 +1:442c + +2:523c +3:523c +
    +====1 +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:525a +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:530,533c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:537c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:539c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:544c +3:544c + +====1 +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:549,558c +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +====1 +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:560,582c +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:588,591c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:595c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:597c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:738c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:820,832c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_pom.mustache.txt new file mode 100644 index 0000000000..980e2155c8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_pom.mustache.txt @@ -0,0 +1,29 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +====1 +1:132c + 1.5.0-M2 +2:151c +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..39777c50de --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,6 @@ +====1 +1:132c + 1.5.0-M2 +2:132c +3:132c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_README.md.txt new file mode 100644 index 0000000000..6e7838f2c6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_README.md.txt @@ -0,0 +1,28 @@ +==== +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21,27c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ||||||| c04f947a85 + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ======= + 2.1.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + >>>>>>> TEMP_RIGHT_BRANCH +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:159c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:161c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:163c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_index.html.txt new file mode 100644 index 0000000000..5d2de91649 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_index.html.txt @@ -0,0 +1,862 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +==== +1:238a +2:296,311c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    + <<<<<<< HEAD +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + ||||||| c04f947a85 +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    + ======= +
    not implemented com.wordnik.swagger.models.properties.MapProperty@787b217
    + >>>>>>> TEMP_RIGHT_BRANCH + +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:316,319c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:323c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:325c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:330c +3:324c + +==== +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:335,348c + <<<<<<< HEAD +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.931+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +==== +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:352,365c + <<<<<<< HEAD +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    + ||||||| c04f947a85 +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + ======= +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.934Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:371,374c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:378c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:380c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:385c +3:367c + +==== +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:390,403c + <<<<<<< HEAD +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.935+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +==== +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:407,420c + <<<<<<< HEAD +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    + ||||||| c04f947a85 +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    + ======= +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.935Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:426,429c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:433c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:435c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:440c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:442a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:447,450c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:454c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:456c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:468,470c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:475c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:477c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:489,491c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:496c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:498c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:508a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:510,513c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:517,522c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:527c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:532c +3:502c +
    "aeiou"
    +====1 +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:536c +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:542,544c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:548a +3:518a +====1 +1:442c + +2:553c +3:523c +
    +====1 +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:555a +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:560,563c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:567c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:569c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:574c +3:544c + +====1 +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:579,588c +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +====1 +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:590,612c +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:618,621c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:625c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:627c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:768c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:850,862c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_pom.mustache.txt new file mode 100644 index 0000000000..2accbb8523 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_pom.mustache.txt @@ -0,0 +1,36 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +==== +1:132c + 1.5.0-M2 +2:151,157c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..61270e2a62 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,25 @@ +==== +1:6c + 1.5.0-M2 +2:6,12c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 2.1.1-M2-SNAPSHOT +====1 +1:17a +2:24,31c +3:18,25c + + + src/main/resources + + logback.xml + + + diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_README.md.txt new file mode 100644 index 0000000000..6e7838f2c6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_README.md.txt @@ -0,0 +1,28 @@ +==== +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21,27c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ||||||| c04f947a85 + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ======= + 2.1.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + >>>>>>> TEMP_RIGHT_BRANCH +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:159c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:161c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:163c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_index.html.txt new file mode 100644 index 0000000000..9752bdf3ad --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_index.html.txt @@ -0,0 +1,873 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +====1 +1:238a +2:296,305c +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:310,313c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:317c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:319c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:324c +3:324c + +====1 +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:329,336c +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +====1 +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:340,347c +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:353,356c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:360c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:362c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:367c +3:367c + +====1 +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:372,379c +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +====1 +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:383,390c +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:396,399c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:403c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:405c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:410c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:412a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:417,420c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:424c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:426c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:438,440c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:445c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:447c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:459,461c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:466c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:468c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:478a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:480,483c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:487,492c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:497c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:502c +3:502c +
    "aeiou"
    +==== +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:506,512c + <<<<<<< HEAD +
    string
    + ||||||| c04f947a85 +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    + ======= +
    not implemented com.wordnik.swagger.models.properties.MapProperty@787b217
    + >>>>>>> TEMP_RIGHT_BRANCH +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:518,520c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:524a +3:518a +==== +1:442c + +2:529,532c + <<<<<<< HEAD +
    + ||||||| c04f947a85 + +3:523c +
    +==== +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:535,557c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + ======= + + + +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.931+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + ||||||| c04f947a85 +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + ======= +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.934Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH + +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:562,565c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:569c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:571c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:576c +3:544c + +==== +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:581,591c + <<<<<<< HEAD +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +==== +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:593,630c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.935+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + ||||||| c04f947a85 +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    + ======= +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.935Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:636,639c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:643c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:645c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:786c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:868,880c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_pom.mustache.txt new file mode 100644 index 0000000000..2accbb8523 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_pom.mustache.txt @@ -0,0 +1,36 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +==== +1:132c + 1.5.0-M2 +2:151,157c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..61270e2a62 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,25 @@ +==== +1:6c + 1.5.0-M2 +2:6,12c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 2.1.1-M2-SNAPSHOT +====1 +1:17a +2:24,31c +3:18,25c + + + src/main/resources + + logback.xml + + + diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_README.md.txt new file mode 100644 index 0000000000..6e7838f2c6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_README.md.txt @@ -0,0 +1,28 @@ +==== +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21,27c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ||||||| c04f947a85 + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ======= + 2.1.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + >>>>>>> TEMP_RIGHT_BRANCH +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:159c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:161c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:163c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_index.html.txt new file mode 100644 index 0000000000..e0f7d570fe --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_index.html.txt @@ -0,0 +1,866 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +====1 +1:238a +2:296,305c +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:310,313c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:317c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:319c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:324c +3:324c + +====1 +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:329,336c +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +====1 +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:340,347c +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:353,356c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:360c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:362c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:367c +3:367c + +====1 +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:372,379c +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +====1 +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:383,390c +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:396,399c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:403c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:405c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:410c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:412a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:417,420c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:424c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:426c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:438,440c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:445c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:447c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:459,461c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:466c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:468c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:478a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:480,483c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:487,492c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:497c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:502c +3:502c +
    "aeiou"
    +==== +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:506,512c + <<<<<<< HEAD +
    string
    + ||||||| c04f947a85 +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    + ======= +
    not implemented com.wordnik.swagger.models.properties.MapProperty@787b217
    + >>>>>>> TEMP_RIGHT_BRANCH +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:518,520c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:524a +3:518a +====1 +1:442c + +2:529c +3:523c +
    +==== +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:532,551c + <<<<<<< HEAD + ||||||| c04f947a85 +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + + ======= +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.931+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.934Z\n  string\n  true\n
    + + >>>>>>> TEMP_RIGHT_BRANCH +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:556,559c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:563c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:565c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:570c +3:544c + +==== +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:575,590c + <<<<<<< HEAD +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.935+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +==== +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:592,624c + <<<<<<< HEAD +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + + ||||||| c04f947a85 +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    + ======= +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.935Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:630,633c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:637c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:639c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:780c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:862,874c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_pom.mustache.txt new file mode 100644 index 0000000000..2accbb8523 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_pom.mustache.txt @@ -0,0 +1,36 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +==== +1:132c + 1.5.0-M2 +2:151,157c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..61270e2a62 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,25 @@ +==== +1:6c + 1.5.0-M2 +2:6,12c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 2.1.1-M2-SNAPSHOT +====1 +1:17a +2:24,31c +3:18,25c + + + src/main/resources + + logback.xml + + + diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_README.md.txt new file mode 100644 index 0000000000..6e7838f2c6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_README.md.txt @@ -0,0 +1,28 @@ +==== +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21,27c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ||||||| c04f947a85 + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ======= + 2.1.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + >>>>>>> TEMP_RIGHT_BRANCH +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:159c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:161c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:163c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_index.html.txt new file mode 100644 index 0000000000..e0f7d570fe --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_index.html.txt @@ -0,0 +1,866 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +====1 +1:238a +2:296,305c +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:310,313c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:317c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:319c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:324c +3:324c + +====1 +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:329,336c +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +====1 +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:340,347c +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:353,356c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:360c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:362c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:367c +3:367c + +====1 +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:372,379c +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +====1 +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:383,390c +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:396,399c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:403c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:405c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:410c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:412a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:417,420c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:424c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:426c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:438,440c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:445c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:447c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:459,461c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:466c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:468c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:478a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:480,483c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:487,492c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:497c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:502c +3:502c +
    "aeiou"
    +==== +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:506,512c + <<<<<<< HEAD +
    string
    + ||||||| c04f947a85 +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    + ======= +
    not implemented com.wordnik.swagger.models.properties.MapProperty@787b217
    + >>>>>>> TEMP_RIGHT_BRANCH +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:518,520c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:524a +3:518a +====1 +1:442c + +2:529c +3:523c +
    +==== +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:532,551c + <<<<<<< HEAD + ||||||| c04f947a85 +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + + ======= +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.931+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.934Z\n  string\n  true\n
    + + >>>>>>> TEMP_RIGHT_BRANCH +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:556,559c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:563c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:565c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:570c +3:544c + +==== +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:575,590c + <<<<<<< HEAD +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.935+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +==== +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:592,624c + <<<<<<< HEAD +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + + ||||||| c04f947a85 +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    + ======= +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.935Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:630,633c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:637c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:639c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:780c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:862,874c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_pom.mustache.txt new file mode 100644 index 0000000000..2accbb8523 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_pom.mustache.txt @@ -0,0 +1,36 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +==== +1:132c + 1.5.0-M2 +2:151,157c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..61270e2a62 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,25 @@ +==== +1:6c + 1.5.0-M2 +2:6,12c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 2.1.1-M2-SNAPSHOT +====1 +1:17a +2:24,31c +3:18,25c + + + src/main/resources + + logback.xml + + + diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_README.md.txt new file mode 100644 index 0000000000..6e7838f2c6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_README.md.txt @@ -0,0 +1,28 @@ +==== +1:21c + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +2:21,27c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ||||||| c04f947a85 + 1.5.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + ======= + 2.1.0-M2 | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) + >>>>>>> TEMP_RIGHT_BRANCH +3:21c + 2.1.1-M2-SNAPSHOT | 2015-04-06 | 1.0, 1.1, 1.2, 2.0 | [master](https://github.com/swagger-api/swagger-codegen) +====1 +1:152a +2:159c +3:153c + PerlClientCodegen.java +====1 +1:153a +2:161c +3:155c + Python3ClientCodegen.java +====1 +1:154a +2:163c +3:157c + Qt5CPPGenerator.java diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_index.html.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_index.html.txt new file mode 100644 index 0000000000..5d2de91649 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_index.html.txt @@ -0,0 +1,862 @@ +====1 +1:5c + +2:4a +3:4a +====1 +1:22,43c +
    post: /user
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    body (optional)
    + +
    Body Parameter — Created user object
    + +
    +

    Return type

    + +
    + + +
    +
    + +
    +
    post: /user/createWithArray
    +
    createUsersWithArrayInput Creates list of users with given input array
    +2:21,23c +3:21,23c +
    put: /pet
    +
    Pet
    +
    updatePet Update an existing pet
    +====1 +1:50c +
    Body Parameter — List of user object
    +2:30c +3:30c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:62,63c +
    post: /user/createWithList
    +
    createUsersWithListInput Creates list of users with given input array
    +2:42,44c +3:42,44c +
    post: /pet
    +
    Pet
    +
    addPet Add a new pet to the store
    +====1 +1:70c +
    Body Parameter — List of user object
    +2:51c +3:51c +
    Body Parameter — Pet object that needs to be added to the store
    +====1 +1:82,84c +
    get: /user/login
    +
    loginUser Logs user into the system
    +
    +2:63,66c +3:63,66c +
    get: /pet/findByStatus
    +
    Pet
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +====1 +1:88,91c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    +2:70c +3:70c +
    status (optional)
    +====1 +1:93c +
    Query Parameter — The password for login in clear text
    +2:72c +3:72c +
    Query Parameter — Status values that need to be considered for filter default: available
    +====1 +1:98c + +2:77c +3:77c + +====1 +1:103c +
    "aeiou"
    +2:82,95c +3:82,95c +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    +====1 +1:107c +
    string
    +2:99,104c +3:99,104c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:113,115c +
    get: /user/logout
    +
    logoutUser Logs out current logged in user session
    +
    +2:110,113c +3:110,113c +
    get: /pet/findByTags
    +
    Pet
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +====1 +1:118a +2:117,119c +3:117,119c +
    tags (optional)
    + +
    Query Parameter — Tags to filter by
    +====1 +1:123c +
    +2:124c +3:124c + +====1 +1:125a +2:127,152c +3:127,152c +

    Example data

    +
    Content-Type: application/json
    +
    [ {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  } ]
    + +

    Example data

    +
    Content-Type: application/xml
    +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    + +====1 +1:130,132c +
    get: /user/{username}
    +
    getUserByName Get user by user name
    +
    +2:157,160c +3:157,160c +
    get: /pet/{petId}
    +
    Pet
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +====1 +1:136c +
    username (required)
    +2:164c +3:164c +
    petId (required)
    +====1 +1:138c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +2:166c +3:166c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:143c + +2:171c +3:171c + +====1 +1:148c +
    {\n  "id" : 123456789,\n  "lastName" : "aeiou",\n  "phone" : "aeiou",\n  "username" : "aeiou",\n  "email" : "aeiou",\n  "userStatus" : 123,\n  "firstName" : "aeiou",\n  "password" : "aeiou"\n}
    +2:176,189c +3:176,189c +
    {
    +    "tags" : [ {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    } ],
    +    "id" : 123456789,
    +    "category" : {
    +      "id" : 123456789,
    +      "name" : "aeiou"
    +    },
    +    "status" : "aeiou",
    +    "name" : "doggie",
    +    "photoUrls" : [ "aeiou" ]
    +  }
    +====1 +1:152c +
    \n  123456\n  string\n  string\n  string\n  string\n  string\n  string\n  0\n
    +2:193,198c +3:193,198c +
    <Pet>
    +    <id>123456</id>
    +    <name>doggie</name>
    +    <photoUrls>string</photoUrls>
    +    <status>string</status>
    +  </Pet>
    +====1 +1:158,160c +
    put: /user/{username}
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    +2:204,207c +3:204,207c +
    post: /pet/{petId}
    +
    Pet
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +====1 +1:164c +
    username (required)
    +2:211c +3:211c +
    petId (required)
    +====1 +1:166,167c +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    +2:213,214c +3:213,214c +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    +====1 +1:169c +
    Body Parameter — Updated user object
    +2:216,219c +3:216,219c +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    + +
    Form Parameter — Updated status of the pet
    +====1 +1:181,183c +
    delete: /user/{username}
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +2:231,234c +3:231,234c +
    delete: /pet/{petId}
    +
    Pet
    +
    deletePet Deletes a pet
    +
    +====1 +1:187c +
    username (required)
    +2:238c +3:238c +
    api_key (optional)
    +====1 +1:189c +
    Path Parameter — The name that needs to be deleted
    +2:240,243c +3:240,243c +
    Header Parameter
    +
    petId (required)
    + +
    Path Parameter — Pet id to delete
    +====1 +1:200,201c + + +2:253a +3:253a +====1 +1:203,204c +
    put: /pet
    +
    updatePet Update an existing pet
    +2:255,257c +3:255,257c +
    post: /pet/{petId}/uploadImage
    +
    Pet
    +
    uploadFile uploads an image
    +====1 +1:209c +
    body (optional)
    +2:262c +3:262c +
    petId (required)
    +====1 +1:211c +
    Body Parameter — Pet object that needs to be added to the store
    +2:264,270c +3:264,270c +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    + +
    Form Parameter — file to upload
    +====1 +1:223,225c +
    post: /pet
    +
    addPet Add a new pet to the store
    +
    +2:282,285c +3:282,285c +
    get: /store/inventory
    +
    Store
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +====1 +1:229,231c +
    body (optional)
    + +
    Body Parameter — Pet object that needs to be added to the store
    +2:288a +3:288a +====1 +1:236c +
    +2:293c +3:293c + +==== +1:238a +2:296,311c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    + <<<<<<< HEAD +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + ||||||| c04f947a85 +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    + ======= +
    not implemented com.wordnik.swagger.models.properties.MapProperty@787b217
    + >>>>>>> TEMP_RIGHT_BRANCH + +3:296,305c +

    Example data

    +
    Content-Type: application/json
    +
    {
    +    "key" : 123
    +  }
    + +

    Example data

    +
    Content-Type: application/xml
    +
    not implemented com.wordnik.swagger.models.properties.MapProperty@3e
    + +====1 +1:243,245c +
    get: /pet/findByStatus
    +
    findPetsByStatus Finds Pets by status
    +
    Multiple status values can be provided with comma seperated strings
    +2:316,319c +3:310,313c +
    post: /store/order
    +
    Store
    +
    placeOrder Place an order for a pet
    +
    +====1 +1:249c +
    status (optional)
    +2:323c +3:317c +
    body (optional)
    +====1 +1:251c +
    Query Parameter — Status values that need to be considered for filter
    +2:325c +3:319c +
    Body Parameter — order placed for purchasing the pet
    +====1 +1:256c + +2:330c +3:324c + +==== +1:261c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:335,348c + <<<<<<< HEAD +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.931+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:329,336c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.118+0000"
    +  }
    +==== +1:265c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:352,365c + <<<<<<< HEAD +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    + ||||||| c04f947a85 +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + ======= +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.934Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:340,347c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.120Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:271,273c +
    get: /pet/findByTags
    +
    findPetsByTags Finds Pets by tags
    +
    Muliple tags can be provided with comma seperated strings. Use tag1, tag2, tag3 for testing.
    +2:371,374c +3:353,356c +
    get: /store/order/{orderId}
    +
    Store
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +====1 +1:277c +
    tags (optional)
    +2:378c +3:360c +
    orderId (required)
    +====1 +1:279c +
    Query Parameter — Tags to filter by
    +2:380c +3:362c +
    Path Parameter — ID of pet that needs to be fetched
    +====1 +1:284c + +2:385c +3:367c + +==== +1:289c +
    [ {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n} ]
    +2:390,403c + <<<<<<< HEAD +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    + ||||||| c04f947a85 +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    + ======= +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-06T14:06:47.935+0000"\n}
    + >>>>>>> TEMP_RIGHT_BRANCH +3:372,379c +
    {
    +    "id" : 123456789,
    +    "petId" : 123456789,
    +    "complete" : true,
    +    "status" : "aeiou",
    +    "quantity" : 123,
    +    "shipDate" : "2015-05-21T05:49:06.121+0000"
    +  }
    +==== +1:293c +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    +2:407,420c + <<<<<<< HEAD +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    + ||||||| c04f947a85 +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    + ======= +
    \n  123456\n  123456\n  0\n  2015-04-06T08:06:47.935Z\n  string\n  true\n
    + >>>>>>> TEMP_RIGHT_BRANCH +3:383,390c +
    <Order>
    +    <id>123456</id>
    +    <petId>123456</petId>
    +    <quantity>0</quantity>
    +    <shipDate>2015-05-20T22:49:06.122Z</shipDate>
    +    <status>string</status>
    +    <complete>true</complete>
    +  </Order>
    +====1 +1:299,301c +
    get: /pet/{petId}
    +
    getPetById Find pet by ID
    +
    Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions
    +2:426,429c +3:396,399c +
    delete: /store/order/{orderId}
    +
    Store
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +====1 +1:305c +
    petId (required)
    +2:433c +3:403c +
    orderId (required)
    +====1 +1:307c +
    Path Parameter — ID of pet that needs to be fetched
    +2:435c +3:405c +
    Path Parameter — ID of the order that needs to be deleted
    +====1 +1:312c + +2:440c +3:410c +
    +====1 +1:315,322c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "tags" : [ {\n    "id" : 123456789,\n    "name" : "aeiou"\n  } ],\n  "id" : 123456789,\n  "category" : {\n    "id" : 123456789,\n    "name" : "aeiou"\n  },\n  "status" : "aeiou",\n  "name" : "doggie",\n  "photoUrls" : [ "aeiou" ]\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  \n    123456\n    string\n  \n  doggie\n  string\n  \n    123456\n    string\n  \n  string\n
    + +2:442a +3:412a +====1 +1:327,329c +
    post: /pet/{petId}
    +
    updatePetWithForm Updates a pet in the store with form data
    +
    +2:447,450c +3:417,420c +
    post: /user
    +
    User
    +
    createUser Create user
    +
    This can only be done by the logged in user.
    +====1 +1:333,339c +
    petId (required)
    + +
    Path Parameter — ID of pet that needs to be updated
    +
    name (optional)
    + +
    Form Parameter — Updated name of the pet
    +
    status (optional)
    +2:454c +3:424c +
    body (optional)
    +====1 +1:341c +
    Form Parameter — Updated status of the pet
    +2:456c +3:426c +
    Body Parameter — Created user object
    +====1 +1:353,354c +
    delete: /pet/{petId}
    +
    deletePet Deletes a pet
    +2:468,470c +3:438,440c +
    post: /user/createWithArray
    +
    User
    +
    createUsersWithArrayInput Creates list of users with given input array
    +====1 +1:359,362c +
    api_key (optional)
    + +
    Header Parameter
    +
    petId (required)
    +2:475c +3:445c +
    body (optional)
    +====1 +1:364c +
    Path Parameter — Pet id to delete
    +2:477c +3:447c +
    Body Parameter — List of user object
    +====1 +1:376,377c +
    post: /pet/{petId}/uploadImage
    +
    uploadFile uploads an image
    +2:489,491c +3:459,461c +
    post: /user/createWithList
    +
    User
    +
    createUsersWithListInput Creates list of users with given input array
    +====1 +1:382,388c +
    petId (required)
    + +
    Path Parameter — ID of pet to update
    +
    additionalMetadata (optional)
    + +
    Form Parameter — Additional data to pass to server
    +
    file (optional)
    +2:496c +3:466c +
    body (optional)
    +====1 +1:390c +
    Form Parameter — file to upload
    +2:498c +3:468c +
    Body Parameter — List of user object
    +====1 +1:401,402c + + +2:508a +3:478a +====1 +1:404,406c +
    get: /store/inventory
    +
    getInventory Returns pet inventories by status
    +
    Returns a map of status codes to quantities
    +2:510,513c +3:480,483c +
    get: /user/login
    +
    User
    +
    loginUser Logs user into the system
    +
    +====1 +1:409a +2:517,522c +3:487,492c +
    username (optional)
    + +
    Query Parameter — The user name for login
    +
    password (optional)
    + +
    Query Parameter — The password for login in clear text
    +====1 +1:414c + +2:527c +3:497c + +====1 +1:419c +
    {\n  "key" : 123\n}
    +2:532c +3:502c +
    "aeiou"
    +====1 +1:423c +
    not implemented com.wordnik.swagger.models.properties.MapProperty@2acca551
    +2:536c +3:506c +
    string
    +====1 +1:429,430c +
    post: /store/order
    +
    placeOrder Place an order for a pet
    +2:542,544c +3:512,514c +
    get: /user/logout
    +
    User
    +
    logoutUser Logs out current logged in user session
    +====1 +1:435,437c +
    body (optional)
    + +
    Body Parameter — order placed for purchasing the pet
    +2:548a +3:518a +====1 +1:442c + +2:553c +3:523c +
    +====1 +1:445,452c +

    Example data

    +
    Content-Type: application/json
    +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.855+0000"\n}
    + +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.857Z\n  string\n  true\n
    + +2:555a +3:525a +====1 +1:457,459c +
    get: /store/order/{orderId}
    +
    getOrderById Find purchase order by ID
    +
    For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
    +2:560,563c +3:530,533c +
    get: /user/{username}
    +
    User
    +
    getUserByName Get user by user name
    +
    +====1 +1:463c +
    orderId (required)
    +2:567c +3:537c +
    username (required)
    +====1 +1:465c +
    Path Parameter — ID of pet that needs to be fetched
    +2:569c +3:539c +
    Path Parameter — The name that needs to be fetched. Use user1 for testing.
    +====1 +1:470c + +2:574c +3:544c + +====1 +1:475c +
    {\n  "id" : 123456789,\n  "petId" : 123456789,\n  "complete" : true,\n  "status" : "aeiou",\n  "quantity" : 123,\n  "shipDate" : "2015-04-05T03:02:18.859+0000"\n}
    +2:579,588c +3:549,558c +
    {
    +    "id" : 1,
    +    "username" : "johnp",
    +    "firstName" : "John",
    +    "lastName" : "Public",
    +    "email" : "johnp@swagger.io",
    +    "password" : "-secret-",
    +    "phone" : "0123456789",
    +    "userStatus" : 0
    +  }
    +====1 +1:477,479c +

    Example data

    +
    Content-Type: application/xml
    +
    \n  123456\n  123456\n  0\n  2015-04-04T20:02:18.859Z\n  string\n  true\n
    +2:590,612c +3:560,582c +
    +
    + +
    +
    put: /user/{username}
    +
    User
    +
    updateUser Updated user
    +
    This can only be done by the logged in user.
    + +

    Parameters

    +
    +
    username (required)
    + +
    Path Parameter — name that need to be deleted
    +
    body (optional)
    + +
    Body Parameter — Updated user object
    + +
    +

    Return type

    + +
    + +====1 +1:485,487c +
    delete: /store/order/{orderId}
    +
    deleteOrder Delete purchase order by ID
    +
    For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
    +2:618,621c +3:588,591c +
    delete: /user/{username}
    +
    User
    +
    deleteUser Delete user
    +
    This can only be done by the logged in user.
    +====1 +1:491c +
    orderId (required)
    +2:625c +3:595c +
    username (required)
    +====1 +1:493c +
    Path Parameter — ID of the order that needs to be deleted
    +2:627c +3:597c +
    Path Parameter — The name that needs to be deleted
    +====1 +1:633a +2:768c +3:738c + margin-bottom: 2px; +====1 +1:714a +2:850,862c +3:820,832c + .method-tags { + text-align: right; + } + + .method-tag { + background: none repeat scroll 0% 0% #24A600; + border-radius: 3px; + padding: 2px 10px; + margin: 2px; + color: #FFF; + display: inline-block; + text-decoration: none; + } diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_pom.mustache.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_pom.mustache.txt new file mode 100644 index 0000000000..2accbb8523 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_pom.mustache.txt @@ -0,0 +1,36 @@ +====1 +1:64a +2:65,83c +3:65,83c + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + add-source + generate-sources + + add-source + + + + src/gen/java + + + + + +==== +1:132c + 1.5.0-M2 +2:151,157c + <<<<<<< HEAD + 1.5.2-M2 + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:151c + 1.5.2-M2 diff --git a/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..61270e2a62 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1890/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,25 @@ +==== +1:6c + 1.5.0-M2 +2:6,12c + <<<<<<< HEAD + 2.1.1-M2-SNAPSHOT + ||||||| c04f947a85 + 1.5.0-M2 + ======= + 2.1.0-M2 + >>>>>>> TEMP_RIGHT_BRANCH +3:6c + 2.1.1-M2-SNAPSHOT +====1 +1:17a +2:24,31c +3:18,25c + + + src/main/resources + + logback.xml + + + diff --git a/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_Models.swift.txt new file mode 100644 index 0000000000..8a5c872118 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_Models.swift.txt @@ -0,0 +1,1866 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? 
Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! 
T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? 
U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) 
-> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! 
ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) 
{ + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) 
+ return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) 
{ + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? 
[String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) 
{ + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) 
{ + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) 
{ + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) 
-> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) 
{ + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:934a +3:934a +====1 +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:936c +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:582c + return source +2:938,940c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:941a +3:941a +====1 +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:944a +3:944a +====1 +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:946,970c +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:973a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:975,977c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:980a +3:980a +====1 +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:982c +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:626c + return source +2:984,986c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:987a +3:987a +====1 +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:990a +3:990a +====1 +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:992c +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:994,996c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:997a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1000a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1002,1044c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) 
{ + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1047a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1049,1067c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1070a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1072,1084c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) 
-> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1087a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1089,1101c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1104a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1106,1124c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1127a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) 
+ result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1129,1183c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) 
{ + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1190c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_PetApi.php.txt new file mode 100644 index 0000000000..9eb97a1d7a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_PetApi.php.txt @@ -0,0 +1,1642 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:120,124c 
+3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,272c + + +3:271,272c + + +====1 +1:228a +2:276c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:278,301c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:303c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:305,306c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:308,329c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:330a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:332,354c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:357a +3:357a +====1 +1:256c + +2:360a +3:360a +====1 +1:263a +2:368c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:384c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:393,395c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:398,400c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:404c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:407c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:409,411c + + + +3:409,411c + + + +====1 +1:309a +2:415c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:417,440c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:442c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:444,445c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:447,468c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:469a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:471,507c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:511c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:514a +3:514a +====1 +1:341c + +2:517a +3:517a +====1 +1:348a +2:525c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:541c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:550,552c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:555,557c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:561c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:564c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:566,568c + + + +3:566,568c + + + +====1 +1:394a +2:572c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:574,586c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => 
$formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:588,599c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:601,602c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:604,625c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:626a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:628,664c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:668c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:671a +3:671a +====1 +1:426c + +2:674a +3:674a +====1 +1:433a +2:682c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:698c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:707,709c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:712,715c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:719c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:721,722c + + +3:721,722c + + +====1 +1:480a +2:726c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:728,740c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:742,751c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:753,780c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:781a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:783,819c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:823c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:826a +3:826a +====1 +1:513c + +2:829a +3:829a +====1 +1:520a +2:837c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:842c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:852c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:861,863c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:866,870c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:881c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:883,895c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:897,908c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:910,911c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:913,934c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:935a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:937,959c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:962a +3:962a +====1 +1:592c + +2:965a +3:965a +====1 +1:601a +2:975c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:980c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:992c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1001,1003c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1006,1009c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1013c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1015c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1018c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1022c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====3 +1:655c +2:1024c + +3:1024c + +====1 +1:658a +2:1028c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1030,1053c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1055c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1057,1058c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1060,1081c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1082a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1084,1106c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1109a +3:1109a +====1 +1:686c + +2:1112a +3:1112a +====1 +1:695a +2:1122c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1140c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1149,1151c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1154,1157c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1161c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1163c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1166c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1170,1171c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====3 +1:755c +2:1173c + +3:1173c + +====1 +1:758a +2:1177c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1179,1191c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1193,1204c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1206,1207c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1209,1230c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1231a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1233,1269c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1273c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1276a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_StoreApi.php.txt new file mode 100644 index 0000000000..d45a4bcd95 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_StoreApi.php.txt @@ -0,0 +1,825 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { 
+ $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,130c + + +3:129,130c + + +====1 +1:143a +2:134c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:136,148c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:150,158c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:160,207c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:208a +3:208a +====1 +1:163c + +2:211a +3:211a +====1 +1:167c + +2:214a +3:214a +====1 +1:173a +2:221c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:236c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:241,243c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +==== +1:197,204c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); + + +2:246,252c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + + +3:246,252c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + + +====1 +1:207a +2:256c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:258,270c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:272,281c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:283,310c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:311a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:313,349c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:353c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:356a +3:356a +====1 +1:240c + +2:359a +3:359a +====1 +1:247a +2:367c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:383c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:392c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:395c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:399,401c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:404,407c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:411c +3:411c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:413,414c + + +3:413,414c + + +====1 +1:301a +2:418c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:420,432c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:434,442c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:444,505c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:506a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:510c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:513a +3:513a +====1 +1:329c + +2:516a +3:516a +====1 +1:336a +2:524c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:540c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:549,551c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:554,558c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:569c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:571,583c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:585,588c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:590,656c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:657a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:661c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:664a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_UserApi.php.txt new file mode 100644 index 0000000000..f70f624185 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_UserApi.php.txt @@ -0,0 +1,1443 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + 
Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,526c + + +3:525,526c + + +====1 +1:368a +2:530c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:532,544c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:546,554c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:556,603c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:604a +3:604a +====1 +1:388c + +2:607a +3:607a +====1 +1:392c + +2:610a +3:610a +====1 +1:399a +2:618c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:634c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:643,645c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:648,651c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:655c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:657,658c + + +3:657,658c + + +====1 +1:446a +2:662c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:664,676c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:678,681c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:683,749c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:750a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:754c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:757a +3:757a +====1 +1:474c + +2:760a +3:760a +====1 +1:482a +2:769c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:786c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:799,801c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:804,806c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:810c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:814c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:816,818c + + + +3:816,818c + + + +====1 +1:534a +2:822c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:824,836c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:838,846c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:848,909c +3:848,909c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:910a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:914c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:917a +3:917a +====1 +1:562c + +2:920a +3:920a +====1 +1:568a +2:927c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:932c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:941c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:946,948c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +==== +1:592,599c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); + + +2:951,957c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + + +3:951,957c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + + +====1 +1:602a +2:961c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:963,975c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:977,980c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:616a +2:982,1034c +3:982,1034c + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1035a +3:1035a +====1 +1:622c + +2:1038a +3:1038a +====1 +1:626c + +2:1041a +3:1041a +====1 +1:634a +2:1050c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1055c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1066c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1079,1081c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1084,1087c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1091c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1093c +3:1093c + +====1 +1:691a +2:1103c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1105,1117c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1119,1127c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1129,1176c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1177a +3:1177a +====1 +1:711c + +2:1180a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_VERSION.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..909e86e0be --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/git_hires_merge/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,13 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:1a +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:66c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_Models.swift.txt new file mode 100644 index 0000000000..1deb823d65 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_Models.swift.txt @@ -0,0 +1,2452 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! 
[AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? 
T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) -> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? 
String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) 
{ + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) 
-> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) + return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) 
{ + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) 
-> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) 
{ + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? [String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) 
{ + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +==== +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,1062c + <<<<<<< HEAD + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + ||||||| 4479382ced + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? 
Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result + } + + + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } + // Decoder for FormatTest + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result + } + + + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } + // Decoder for HasOnlyReadOnly + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result + } + + + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } + // Decoder for List + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) 
+ return result + } + + + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } + // Decoder for MapTest + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario + } + + return result + } + + + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } + // Decoder for MixedPropertiesAndAdditionalPropertiesClass + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result + } + + + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } + // Decoder for Model200Response + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result + } + + + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } + // Decoder for Name + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) 
+ return result + } + + + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } + // Decoder for NumberOnly + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result + } + + + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } + // Decoder for Order + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result + } + + + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + // Decoder for OuterBoolean + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in + if let source = source as? Bool { + return source + } + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") + } + + + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + // Decoder for OuterComposite + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance + } + + + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } + // Decoder for OuterEnum + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? 
String { + if let result = OuterEnum(rawValue: source) { + return result + ======= + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result + } + + + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } + // Decoder for FormatTest + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result + } + + + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } + // Decoder for HasOnlyReadOnly + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) 
+ return result + } + + + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } + // Decoder for List + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result + } + + + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } + // Decoder for MapTest + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario + } + + return result + } + + + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } + // Decoder for MixedPropertiesAndAdditionalPropertiesClass + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result + } + + + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } + // Decoder for Model200Response + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result + } + + + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } + // Decoder for Name + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) 
-> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result + } + + + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } + // Decoder for NumberOnly + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result + } + + + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } + // Decoder for Order + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result + } + + + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject, instance: AnyObject?) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + // Decoder for OuterBoolean + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> OuterBoolean in + if let source = source as? Bool { + return source + } + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") + } + + + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject, instance: AnyObject?) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + // Decoder for OuterComposite + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + + result.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) 
+ result.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + result.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return result + } + + + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } + // Decoder for OuterEnum + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + >>>>>>> TEMP_RIGHT_BRANCH + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +==== +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:1066c + <<<<<<< HEAD +3:631a +==== +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) 
+ result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:1068,1161c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) 
{ + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } + ||||||| 4479382ced + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + ======= + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject, instance: AnyObject?) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + >>>>>>> TEMP_RIGHT_BRANCH +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) 
{ + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:1164a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:1166,1184c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:1187a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) 
+ return result +2:1189,1201c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:1204a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:1206,1223c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:1224a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:1227a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) 
+ return result +2:1229,1253c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:1256a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:1258,1276c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:1279a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) 
+ result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:1281,1311c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:1314a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:1316,1328c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) { + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:1331a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) 
+ result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:1333,1375c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:1378a +3:934a +====1 +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:1380c +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:582c + return source +2:1382,1384c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:1385a +3:941a +====1 +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:1388a +3:944a +====1 +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) 
+ instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:1390,1414c +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:1417a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:1419,1421c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:1424a +3:980a +==== +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:1426,1432c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in + ======= + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> OuterNumber in + >>>>>>> TEMP_RIGHT_BRANCH +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:626c + return source +2:1434,1436c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:1437a +3:987a +==== +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:1441,1452c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + ======= + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject, instance: AnyObject?) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:990a +==== +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:1454,1460c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in + ======= + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> OuterString in + >>>>>>> TEMP_RIGHT_BRANCH +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:1462,1464c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:1465a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1468a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1470,1512c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) 
{ + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1515a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1517,1535c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1538a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) 
+ return result +2:1540,1552c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1555a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1557,1569c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1572a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1574,1592c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) 
-> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1595a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1597,1651c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) 
{ + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1658c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_PetApi.php.txt new file mode 100644 index 0000000000..8a89185203 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_PetApi.php.txt @@ -0,0 +1,1662 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:120,124c +3:120,124c + 
$httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,277c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:271,272c + + +====1 +1:228a +2:281c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:283,306c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:308c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:310,311c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:313,334c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:335a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:337,359c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:362a +3:357a +====1 +1:256c + +2:365a +3:360a +====1 +1:263a +2:373c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:389c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:398,400c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:403,405c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:409c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:412c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:414,421c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:409,411c + + + +====1 +1:309a +2:425c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:427,450c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:452c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . 
$this->apiClient->getConfig()->getAccessToken(); +2:454,455c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:457,478c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:479a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:481,517c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:521c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:524a +3:514a +====1 +1:341c + +2:527a +3:517a +====1 +1:348a +2:535c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:551c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:560,562c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:565,567c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:571c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:574c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:576,583c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> 
TEMP_RIGHT_BRANCH +3:566,568c + + + +====1 +1:394a +2:587c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:589,601c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:603,614c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:616,617c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:619,640c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:641a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:643,679c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:683c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:686a +3:671a +====1 +1:426c + +2:689a +3:674a +====1 +1:433a +2:697c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:713c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:722,724c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:727,730c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:734c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:736,742c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:721,722c + + +====1 +1:480a +2:746c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:748,760c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:762,771c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:773,800c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:801a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:803,839c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:843c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:846a +3:826a +====1 +1:513c + +2:849a +3:829a +====1 +1:520a +2:857c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:862c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:872c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:881,883c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:886,890c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:901c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:903,915c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:917,928c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:930,931c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:933,954c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:955a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:957,979c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:982a +3:962a +====1 +1:592c + +2:985a +3:965a +====1 +1:601a +2:995c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:1000c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:1012c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1021,1023c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1026,1029c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1033c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1035c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1038c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1042c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====1 +1:655c + +2:1044c +3:1024c + +====1 +1:658a +2:1048c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1050,1073c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1075c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1077,1078c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1080,1101c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1102a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1104,1126c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1129a +3:1109a +====1 +1:686c + +2:1132a +3:1112a +====1 +1:695a +2:1142c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1160c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1169,1171c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1174,1177c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1181c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1183c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1186c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1190,1191c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====1 +1:755c + +2:1193c +3:1173c + +====1 +1:758a +2:1197c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1199,1211c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1213,1224c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1226,1227c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1229,1250c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1251a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1253,1289c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1293c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1296a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_StoreApi.php.txt new file mode 100644 index 0000000000..f8e1e3ce2b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_StoreApi.php.txt @@ -0,0 +1,830 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + 
$apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,135c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:129,130c + + +====1 +1:143a +2:139c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:141,153c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:155,163c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:165,212c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:213a +3:208a +====1 +1:163c + +2:216a +3:211a +====1 +1:167c + +2:219a +3:214a +====1 +1:173a +2:226c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:241c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:246,248c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +====1 +1:197,202c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:251,256c +3:246,251c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + +====1 +1:204c + +2:257a +3:252a +====1 +1:207a +2:261c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:263,275c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:277,286c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:288,315c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:316a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:318,354c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:358c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:361a +3:356a +====1 +1:240c + +2:364a +3:359a +====1 +1:247a +2:372c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:388c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:397c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:400c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:404,406c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:409,412c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:416c +3:411c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:418,424c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:413,414c + + +====1 +1:301a +2:428c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:430,442c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:444,452c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:454,515c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . 
$resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:516a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:520c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:523a +3:513a +====1 +1:329c + +2:526a +3:516a +====1 +1:336a +2:534c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:550c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:559,561c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:564,568c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:579c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:581,593c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:595,598c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:600,666c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 
'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:667a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:671c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:674a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_UserApi.php.txt new file mode 100644 index 0000000000..49eb133815 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_UserApi.php.txt @@ -0,0 +1,1453 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + 
public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,531c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:525,526c + + +====1 +1:368a +2:535c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:537,549c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:551,559c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:561,608c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:609a +3:604a +====1 +1:388c + +2:612a +3:607a +====1 +1:392c + +2:615a +3:610a +====1 +1:399a +2:623c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:639c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:648,650c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:653,656c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:660c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:662,668c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:657,658c + + +====1 +1:446a +2:672c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:674,686c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:688,691c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:693,759c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:760a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:764c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:767a +3:757a +====1 +1:474c + +2:770a +3:760a +====1 +1:482a +2:779c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:796c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:809,811c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:814,816c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:820c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:824c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:826,833c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:816,818c + + + +====1 +1:534a +2:837c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:839,851c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:853,861c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:863,924c +3:848,909c + } + + + $query = 
\GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:925a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:929c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:932a +3:917a +====1 +1:562c + +2:935a +3:920a +====1 +1:568a +2:942c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:947c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:956c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:961,963c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +====1 +1:592,597c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:966,971c +3:951,956c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + +====1 +1:599c + +2:972a +3:957a +====1 +1:602a +2:976c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:978,990c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:992,995c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 
'application/json'] +====1 +1:616a +2:997,1049c +3:982,1034c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1050a +3:1035a +====1 +1:622c + +2:1053a +3:1038a +====1 +1:626c + +2:1056a +3:1041a +====1 +1:634a +2:1065c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1070c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1081c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1094,1096c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1099,1102c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1106c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1108c +3:1093c + +====1 +1:691a +2:1118c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1120,1132c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1134,1142c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1144,1191c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1192a +3:1177a +====1 +1:711c + +2:1195a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_VERSION.txt new file mode 100644 index 0000000000..ce107c7353 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_VERSION.txt @@ -0,0 +1,10 @@ +1,6c1 +< <<<<<<< HEAD +< 2.3.0-SNAPSHOT +< ||||||| 4479382ced +< ======= +< 2.2.3-SNAPSHOT +< >>>>>>> TEMP_RIGHT_BRANCH +--- +> 2.3.0-SNAPSHOT +\ No newline at end of file diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..c523742933 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,20 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:2,8c + <<<<<<< HEAD + ||||||| 4479382ced + io.swagger.codegen.languages.AspNet5ServerCodegen + ======= + io.swagger.codegen.languages.ApexClientCodegen + io.swagger.codegen.languages.AspNet5ServerCodegen + >>>>>>> TEMP_RIGHT_BRANCH +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:73c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_Models.swift.txt new file mode 100644 index 0000000000..8a5c872118 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_Models.swift.txt @@ -0,0 +1,1866 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? 
Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! 
T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? 
U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) 
-> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! 
ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) 
{ + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) 
+ return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) 
{ + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? 
[String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) 
{ + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) 
{ + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) 
{ + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) 
-> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) 
{ + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:934a +3:934a +====1 +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:936c +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:582c + return source +2:938,940c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:941a +3:941a +====1 +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:944a +3:944a +====1 +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:946,970c +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:973a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:975,977c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:980a +3:980a +====1 +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:982c +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:626c + return source +2:984,986c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:987a +3:987a +====1 +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:990a +3:990a +====1 +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:992c +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:994,996c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:997a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1000a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1002,1044c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) 
{ + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1047a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1049,1067c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1070a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1072,1084c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) 
-> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1087a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1089,1101c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1104a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1106,1124c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1127a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) 
+ result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1129,1183c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) 
{ + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1190c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_PetApi.php.txt new file mode 100644 index 0000000000..9eb97a1d7a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_PetApi.php.txt @@ -0,0 +1,1642 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); 
+2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,272c + + +3:271,272c + + +====1 +1:228a +2:276c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:278,301c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:303c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:305,306c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:308,329c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:330a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:332,354c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:357a +3:357a +====1 +1:256c + +2:360a +3:360a +====1 +1:263a +2:368c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:384c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:393,395c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:398,400c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:404c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:407c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:409,411c + + + +3:409,411c + + + +====1 +1:309a +2:415c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:417,440c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:442c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:444,445c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:447,468c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:469a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:471,507c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:511c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:514a +3:514a +====1 +1:341c + +2:517a +3:517a +====1 +1:348a +2:525c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:541c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:550,552c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:555,557c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:561c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:564c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:566,568c + + + +3:566,568c + + + +====1 +1:394a +2:572c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:574,586c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => 
$formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:588,599c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:601,602c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:604,625c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:626a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:628,664c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:668c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:671a +3:671a +====1 +1:426c + +2:674a +3:674a +====1 +1:433a +2:682c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:698c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:707,709c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:712,715c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:719c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:721,722c + + +3:721,722c + + +====1 +1:480a +2:726c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:728,740c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:742,751c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:753,780c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:781a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:783,819c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:823c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:826a +3:826a +====1 +1:513c + +2:829a +3:829a +====1 +1:520a +2:837c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:842c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:852c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:861,863c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:866,870c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:881c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:883,895c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:897,908c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:910,911c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:913,934c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:935a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:937,959c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:962a +3:962a +====1 +1:592c + +2:965a +3:965a +====1 +1:601a +2:975c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:980c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:992c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1001,1003c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1006,1009c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1013c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1015c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1018c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1022c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====3 +1:655c +2:1024c + +3:1024c + +====1 +1:658a +2:1028c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1030,1053c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1055c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1057,1058c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1060,1081c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1082a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1084,1106c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1109a +3:1109a +====1 +1:686c + +2:1112a +3:1112a +====1 +1:695a +2:1122c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1140c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1149,1151c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1154,1157c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1161c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1163c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1166c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1170,1171c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====3 +1:755c +2:1173c + +3:1173c + +====1 +1:758a +2:1177c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1179,1191c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1193,1204c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1206,1207c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1209,1230c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1231a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1233,1269c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1273c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1276a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_StoreApi.php.txt new file mode 100644 index 0000000000..d45a4bcd95 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_StoreApi.php.txt @@ -0,0 +1,825 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if 
($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,130c + + +3:129,130c + + +====1 +1:143a +2:134c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:136,148c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:150,158c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:160,207c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . 
($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:208a +3:208a +====1 +1:163c + +2:211a +3:211a +====1 +1:167c + +2:214a +3:214a +====1 +1:173a +2:221c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:236c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:241,243c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +==== +1:197,204c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); + + +2:246,252c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + + +3:246,252c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + + +====1 +1:207a +2:256c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:258,270c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:272,281c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:283,310c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:311a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:313,349c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:353c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:356a +3:356a +====1 +1:240c + +2:359a +3:359a +====1 +1:247a +2:367c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:383c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:392c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:395c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:399,401c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:404,407c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:411c +3:411c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:413,414c + + +3:413,414c + + +====1 +1:301a +2:418c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:420,432c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:434,442c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:444,505c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:506a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:510c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:513a +3:513a +====1 +1:329c + +2:516a +3:516a +====1 +1:336a +2:524c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:540c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:549,551c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:554,558c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:569c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:571,583c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:585,588c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:590,656c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:657a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:661c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:664a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_UserApi.php.txt new file mode 100644 index 0000000000..f70f624185 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_UserApi.php.txt @@ -0,0 +1,1443 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client 
= null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,526c + + +3:525,526c + + +====1 +1:368a +2:530c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:532,544c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:546,554c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:556,603c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:604a +3:604a +====1 +1:388c + +2:607a +3:607a +====1 +1:392c + +2:610a +3:610a +====1 +1:399a +2:618c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:634c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:643,645c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:648,651c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:655c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:657,658c + + +3:657,658c + + +====1 +1:446a +2:662c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:664,676c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:678,681c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:683,749c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:750a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:754c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:757a +3:757a +====1 +1:474c + +2:760a +3:760a +====1 +1:482a +2:769c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:786c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:799,801c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:804,806c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:810c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:814c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:816,818c + + + +3:816,818c + + + +====1 +1:534a +2:822c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:824,836c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:838,846c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:848,909c +3:848,909c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:910a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:914c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:917a +3:917a +====1 +1:562c + +2:920a +3:920a +====1 +1:568a +2:927c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:932c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:941c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:946,948c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +==== +1:592,599c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); + + +2:951,957c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + + +3:951,957c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + + +====1 +1:602a +2:961c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:963,975c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:977,980c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:616a +2:982,1034c +3:982,1034c + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1035a +3:1035a +====1 +1:622c + +2:1038a +3:1038a +====1 +1:626c + +2:1041a +3:1041a +====1 +1:634a +2:1050c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1055c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1066c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1079,1081c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1084,1087c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1091c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1093c +3:1093c + +====1 +1:691a +2:1103c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1105,1117c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1119,1127c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1129,1176c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1177a +3:1177a +====1 +1:711c + +2:1180a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_VERSION.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..909e86e0be --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_adjacent/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,13 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:1a +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:66c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_Models.swift.txt new file mode 100644 index 0000000000..a627a5759c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_Models.swift.txt @@ -0,0 +1,1982 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! 
[AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? 
T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) -> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? 
String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) 
{ + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) 
-> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) + return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) 
{ + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) 
-> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) 
{ + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? [String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) 
{ + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) 
{ + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) 
{ + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) 
{ + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) 
-> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) 
{ + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) 
{ + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +==== +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:935,946c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + ======= + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject, instance: AnyObject?) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:934a +==== +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:948,954c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in + ======= + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> OuterBoolean in + >>>>>>> TEMP_RIGHT_BRANCH +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:582c + return source +2:956,958c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:959a +3:941a +==== +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:963,974c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + ======= + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject, instance: AnyObject?) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:944a +==== +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:976,1022c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! 
OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + ======= + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } + ||||||| 4479382ced + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance + ======= + result.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + result.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + result.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return result + >>>>>>> TEMP_RIGHT_BRANCH +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) 
-> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:1025a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:1027,1029c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +==== +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:1033,1044c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } + ======= + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject, instance: AnyObject?) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:980a +==== +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:1046,1052c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in + ======= + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> OuterNumber in + >>>>>>> TEMP_RIGHT_BRANCH +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:626c + return source +2:1054,1056c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:1057a +3:987a +==== +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:1061,1072c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + ======= + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject, instance: AnyObject?) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:990a +==== +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:1074,1080c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in + ======= + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) 
-> OuterString in + >>>>>>> TEMP_RIGHT_BRANCH +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:1082,1084c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:1085a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1088a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1090,1132c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) 
-> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1135a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1137,1155c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1158a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1160,1172c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1175a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1177,1189c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? 
SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1192a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1194,1212c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1215a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1217,1271c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! 
User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) { + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1278c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_VERSION.txt new file mode 100644 index 0000000000..ce107c7353 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_VERSION.txt @@ -0,0 +1,10 @@ +1,6c1 +< <<<<<<< HEAD +< 2.3.0-SNAPSHOT +< ||||||| 4479382ced +< ======= +< 2.2.3-SNAPSHOT +< >>>>>>> TEMP_RIGHT_BRANCH +--- +> 2.3.0-SNAPSHOT +\ No newline at end of file diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..c523742933 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,20 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:2,8c + <<<<<<< HEAD + ||||||| 4479382ced + io.swagger.codegen.languages.AspNet5ServerCodegen + ======= + io.swagger.codegen.languages.ApexClientCodegen + io.swagger.codegen.languages.AspNet5ServerCodegen + >>>>>>> TEMP_RIGHT_BRANCH +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:73c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_Models.swift.txt 
b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_Models.swift.txt new file mode 100644 index 0000000000..8a5c872118 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_Models.swift.txt @@ -0,0 +1,1866 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) 
-> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) 
-> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) 
+ result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) -> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! 
[AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) 
{ + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) 
{ + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) + return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) 
-> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) 
{ + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) 
+ return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? [String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) 
+ result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) 
{ + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) 
-> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) 
{ + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) 
{ + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) { + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! 
Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:934a +3:934a +====1 +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:936c +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:582c + return source +2:938,940c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:941a +3:941a +====1 +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:944a +3:944a +====1 +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:946,970c +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) 
{ + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:973a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:975,977c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:980a +3:980a +====1 +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:982c +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:626c + return source +2:984,986c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:987a +3:987a +====1 +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:990a +3:990a +====1 +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:992c +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:994,996c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:997a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1000a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! 
Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1002,1044c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1047a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1049,1067c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) 
{ + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1070a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1072,1084c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1087a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1089,1101c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1104a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) 
+ result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1106,1124c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1127a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1129,1183c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) 
{ + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) { + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1190c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_PetApi.php.txt new file mode 100644 index 0000000000..9eb97a1d7a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_PetApi.php.txt @@ -0,0 +1,1642 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = 
$this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,272c + + +3:271,272c + + +====1 +1:228a +2:276c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:278,301c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:303c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:305,306c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:308,329c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:330a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:332,354c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:357a +3:357a +====1 +1:256c + +2:360a +3:360a +====1 +1:263a +2:368c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:384c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:393,395c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:398,400c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:404c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:407c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:409,411c + + + +3:409,411c + + + +====1 +1:309a +2:415c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:417,440c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:442c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:444,445c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:447,468c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:469a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:471,507c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:511c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:514a +3:514a +====1 +1:341c + +2:517a +3:517a +====1 +1:348a +2:525c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:541c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:550,552c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:555,557c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:561c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:564c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:566,568c + + + +3:566,568c + + + +====1 +1:394a +2:572c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:574,586c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => 
$formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:588,599c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:601,602c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:604,625c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:626a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:628,664c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:668c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:671a +3:671a +====1 +1:426c + +2:674a +3:674a +====1 +1:433a +2:682c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:698c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:707,709c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:712,715c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:719c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:721,722c + + +3:721,722c + + +====1 +1:480a +2:726c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:728,740c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:742,751c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:753,780c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:781a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:783,819c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:823c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:826a +3:826a +====1 +1:513c + +2:829a +3:829a +====1 +1:520a +2:837c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:842c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:852c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:861,863c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:866,870c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:881c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:883,895c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:897,908c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:910,911c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:913,934c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:935a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:937,959c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:962a +3:962a +====1 +1:592c + +2:965a +3:965a +====1 +1:601a +2:975c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:980c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:992c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1001,1003c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1006,1009c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1013c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1015c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1018c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1022c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====3 +1:655c +2:1024c + +3:1024c + +====1 +1:658a +2:1028c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1030,1053c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1055c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1057,1058c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1060,1081c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1082a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1084,1106c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1109a +3:1109a +====1 +1:686c + +2:1112a +3:1112a +====1 +1:695a +2:1122c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1140c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1149,1151c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1154,1157c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1161c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1163c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1166c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1170,1171c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====3 +1:755c +2:1173c + +3:1173c + +====1 +1:758a +2:1177c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1179,1191c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1193,1204c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1206,1207c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1209,1230c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1231a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1233,1269c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1273c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1276a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_StoreApi.php.txt new file mode 100644 index 0000000000..d45a4bcd95 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_StoreApi.php.txt @@ -0,0 +1,825 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if 
($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,130c + + +3:129,130c + + +====1 +1:143a +2:134c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:136,148c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:150,158c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:160,207c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . 
($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:208a +3:208a +====1 +1:163c + +2:211a +3:211a +====1 +1:167c + +2:214a +3:214a +====1 +1:173a +2:221c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:236c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:241,243c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +==== +1:197,204c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); + + +2:246,252c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + + +3:246,252c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + + +====1 +1:207a +2:256c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:258,270c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:272,281c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:283,310c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:311a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:313,349c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:353c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:356a +3:356a +====1 +1:240c + +2:359a +3:359a +====1 +1:247a +2:367c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:383c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:392c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:395c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:399,401c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:404,407c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:411c +3:411c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:413,414c + + +3:413,414c + + +====1 +1:301a +2:418c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:420,432c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:434,442c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:444,505c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:506a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:510c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:513a +3:513a +====1 +1:329c + +2:516a +3:516a +====1 +1:336a +2:524c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:540c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:549,551c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:554,558c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:569c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:571,583c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:585,588c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:590,656c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:657a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:661c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:664a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_UserApi.php.txt new file mode 100644 index 0000000000..f70f624185 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_UserApi.php.txt @@ -0,0 +1,1443 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = 
null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,526c + + +3:525,526c + + +====1 +1:368a +2:530c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:532,544c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:546,554c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:556,603c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:604a +3:604a +====1 +1:388c + +2:607a +3:607a +====1 +1:392c + +2:610a +3:610a +====1 +1:399a +2:618c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:634c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:643,645c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:648,651c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:655c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:657,658c + + +3:657,658c + + +====1 +1:446a +2:662c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:664,676c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:678,681c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:683,749c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:750a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:754c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:757a +3:757a +====1 +1:474c + +2:760a +3:760a +====1 +1:482a +2:769c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:786c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:799,801c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:804,806c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:810c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:814c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:816,818c + + + +3:816,818c + + + +====1 +1:534a +2:822c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:824,836c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:838,846c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:848,909c +3:848,909c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:910a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:914c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:917a +3:917a +====1 +1:562c + +2:920a +3:920a +====1 +1:568a +2:927c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:932c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:941c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:946,948c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +==== +1:592,599c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); + + +2:951,957c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + + +3:951,957c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + + +====1 +1:602a +2:961c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:963,975c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:977,980c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:616a +2:982,1034c +3:982,1034c + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1035a +3:1035a +====1 +1:622c + +2:1038a +3:1038a +====1 +1:626c + +2:1041a +3:1041a +====1 +1:634a +2:1050c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1055c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1066c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1079,1081c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1084,1087c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1091c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1093c +3:1093c + +====1 +1:691a +2:1103c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1105,1117c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1119,1127c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1129,1176c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1177a +3:1177a +====1 +1:711c + +2:1180a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_VERSION.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..909e86e0be --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,13 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:1a +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:66c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports_ignorespace/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports_ignorespace/diff_Models.swift.txt new file mode 100644 index 0000000000..8a5c872118 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports_ignorespace/diff_Models.swift.txt @@ -0,0 +1,1866 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! 
[AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? 
T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) -> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? 
String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) 
{ + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) 
-> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) + return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) 
{ + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) 
-> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) 
{ + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? [String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) 
{ + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) 
{ + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) 
{ + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) 
{ + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) 
-> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) 
{ + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:934a +3:934a +====1 +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:936c +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:582c + return source +2:938,940c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:941a +3:941a +====1 +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:944a +3:944a +====1 +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:946,970c +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:973a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:975,977c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:980a +3:980a +====1 +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:982c +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:626c + return source +2:984,986c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:987a +3:987a +====1 +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:990a +3:990a +====1 +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:992c +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:994,996c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:997a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1000a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1002,1044c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) 
{ + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1047a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1049,1067c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1070a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1072,1084c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) 
-> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1087a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1089,1101c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1104a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1106,1124c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1127a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) 
+ result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1129,1183c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) 
{ + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1190c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports_ignorespace/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports_ignorespace/diff_VERSION.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..909e86e0be --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_ort_imports_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,13 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:1a +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:66c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_Models.swift.txt new file mode 100644 index 0000000000..1deb823d65 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_Models.swift.txt @@ -0,0 +1,2452 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! 
[AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? 
T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) -> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? 
String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) 
{ + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) 
-> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) + return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) 
{ + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) 
-> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) 
{ + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? [String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) 
{ + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +==== +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,1062c + <<<<<<< HEAD + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + ||||||| 4479382ced + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? 
Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result + } + + + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } + // Decoder for FormatTest + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result + } + + + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } + // Decoder for HasOnlyReadOnly + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result + } + + + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } + // Decoder for List + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) 
+ return result + } + + + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } + // Decoder for MapTest + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario + } + + return result + } + + + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } + // Decoder for MixedPropertiesAndAdditionalPropertiesClass + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result + } + + + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } + // Decoder for Model200Response + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result + } + + + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } + // Decoder for Name + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) 
+ return result + } + + + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } + // Decoder for NumberOnly + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result + } + + + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } + // Decoder for Order + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result + } + + + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + // Decoder for OuterBoolean + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in + if let source = source as? Bool { + return source + } + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") + } + + + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + // Decoder for OuterComposite + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance + } + + + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } + // Decoder for OuterEnum + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? 
String { + if let result = OuterEnum(rawValue: source) { + return result + ======= + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result + } + + + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } + // Decoder for FormatTest + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result + } + + + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } + // Decoder for HasOnlyReadOnly + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) 
+ return result + } + + + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } + // Decoder for List + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result + } + + + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } + // Decoder for MapTest + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario + } + + return result + } + + + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } + // Decoder for MixedPropertiesAndAdditionalPropertiesClass + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result + } + + + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } + // Decoder for Model200Response + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result + } + + + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } + // Decoder for Name + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) 
-> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result + } + + + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } + // Decoder for NumberOnly + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result + } + + + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } + // Decoder for Order + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result + } + + + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject, instance: AnyObject?) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + // Decoder for OuterBoolean + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> OuterBoolean in + if let source = source as? Bool { + return source + } + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") + } + + + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject, instance: AnyObject?) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + // Decoder for OuterComposite + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + + result.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) 
+ result.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + result.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return result + } + + + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } + // Decoder for OuterEnum + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + >>>>>>> TEMP_RIGHT_BRANCH + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +==== +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:1066c + <<<<<<< HEAD +3:631a +==== +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) 
+ result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:1068,1161c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) 
{ + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } + ||||||| 4479382ced + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + ======= + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject, instance: AnyObject?) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + >>>>>>> TEMP_RIGHT_BRANCH +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) 
{ + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:1164a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:1166,1184c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:1187a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) 
+ return result +2:1189,1201c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:1204a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:1206,1223c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:1224a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:1227a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) 
+ return result +2:1229,1253c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:1256a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:1258,1276c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:1279a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) 
+ result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:1281,1311c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:1314a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:1316,1328c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) { + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:1331a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) 
+ result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:1333,1375c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:1378a +3:934a +====1 +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:1380c +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:582c + return source +2:1382,1384c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:1385a +3:941a +====1 +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:1388a +3:944a +====1 +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) 
+ instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:1390,1414c +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:1417a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:1419,1421c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:1424a +3:980a +==== +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:1426,1432c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in + ======= + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> OuterNumber in + >>>>>>> TEMP_RIGHT_BRANCH +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:626c + return source +2:1434,1436c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:1437a +3:987a +==== +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:1441,1452c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + ======= + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject, instance: AnyObject?) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:990a +==== +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:1454,1460c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in + ======= + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> OuterString in + >>>>>>> TEMP_RIGHT_BRANCH +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:1462,1464c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:1465a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1468a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1470,1512c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) 
{ + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1515a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1517,1535c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1538a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) 
+ return result +2:1540,1552c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1555a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1557,1569c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1572a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1574,1592c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) 
-> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1595a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1597,1651c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) 
{ + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1658c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_PetApi.php.txt new file mode 100644 index 0000000000..8a89185203 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_PetApi.php.txt @@ -0,0 +1,1662 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 
'application/xml']); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,277c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:271,272c + + +====1 +1:228a +2:281c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:283,306c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:308c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:310,311c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:313,334c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:335a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:337,359c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:362a +3:357a +====1 +1:256c + +2:365a +3:360a +====1 +1:263a +2:373c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:389c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:398,400c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:403,405c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:409c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:412c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:414,421c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:409,411c + + + +====1 +1:309a +2:425c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:427,450c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:452c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . 
$this->apiClient->getConfig()->getAccessToken(); +2:454,455c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:457,478c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:479a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:481,517c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:521c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:524a +3:514a +====1 +1:341c + +2:527a +3:517a +====1 +1:348a +2:535c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:551c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:560,562c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:565,567c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:571c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:574c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:576,583c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> 
TEMP_RIGHT_BRANCH +3:566,568c + + + +====1 +1:394a +2:587c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:589,601c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:603,614c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:616,617c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:619,640c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:641a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:643,679c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:683c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:686a +3:671a +====1 +1:426c + +2:689a +3:674a +====1 +1:433a +2:697c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:713c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:722,724c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:727,730c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:734c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:736,742c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:721,722c + + +====1 +1:480a +2:746c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:748,760c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:762,771c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:773,800c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:801a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:803,839c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:843c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:846a +3:826a +====1 +1:513c + +2:849a +3:829a +====1 +1:520a +2:857c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:862c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:872c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:881,883c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:886,890c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:901c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:903,915c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:917,928c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:930,931c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:933,954c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:955a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:957,979c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:982a +3:962a +====1 +1:592c + +2:985a +3:965a +====1 +1:601a +2:995c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:1000c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:1012c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1021,1023c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1026,1029c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1033c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1035c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1038c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1042c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====1 +1:655c + +2:1044c +3:1024c + +====1 +1:658a +2:1048c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1050,1073c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1075c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1077,1078c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1080,1101c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1102a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1104,1126c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1129a +3:1109a +====1 +1:686c + +2:1132a +3:1112a +====1 +1:695a +2:1142c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1160c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1169,1171c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1174,1177c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1181c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1183c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1186c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1190,1191c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====1 +1:755c + +2:1193c +3:1173c + +====1 +1:758a +2:1197c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1199,1211c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1213,1224c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1226,1227c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1229,1250c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1251a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1253,1289c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1293c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1296a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_StoreApi.php.txt new file mode 100644 index 0000000000..f8e1e3ce2b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_StoreApi.php.txt @@ -0,0 +1,830 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = 
null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,135c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:129,130c + + +====1 +1:143a +2:139c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:141,153c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:155,163c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:165,212c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:213a +3:208a +====1 +1:163c + +2:216a +3:211a +====1 +1:167c + +2:219a +3:214a +====1 +1:173a +2:226c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:241c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:246,248c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +====1 +1:197,202c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:251,256c +3:246,251c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + +====1 +1:204c + +2:257a +3:252a +====1 +1:207a +2:261c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:263,275c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:277,286c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:288,315c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:316a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:318,354c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:358c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:361a +3:356a +====1 +1:240c + +2:364a +3:359a +====1 +1:247a +2:372c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:388c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:397c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:400c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:404,406c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:409,412c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:416c +3:411c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:418,424c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:413,414c + + +====1 +1:301a +2:428c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:430,442c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:444,452c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:454,515c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . 
$resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:516a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:520c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:523a +3:513a +====1 +1:329c + +2:526a +3:516a +====1 +1:336a +2:534c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:550c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:559,561c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:564,568c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:579c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:581,593c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:595,598c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:600,666c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 
'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:667a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:671c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:674a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_UserApi.php.txt new file mode 100644 index 0000000000..49eb133815 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_UserApi.php.txt @@ -0,0 +1,1453 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param 
HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,531c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:525,526c + + +====1 +1:368a +2:535c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:537,549c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:551,559c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:561,608c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:609a +3:604a +====1 +1:388c + +2:612a +3:607a +====1 +1:392c + +2:615a +3:610a +====1 +1:399a +2:623c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:639c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:648,650c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:653,656c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:660c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:662,668c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:657,658c + + +====1 +1:446a +2:672c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:674,686c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:688,691c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:693,759c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:760a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:764c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:767a +3:757a +====1 +1:474c + +2:770a +3:760a +====1 +1:482a +2:779c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:796c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:809,811c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:814,816c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:820c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:824c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:826,833c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:816,818c + + + +====1 +1:534a +2:837c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:839,851c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:853,861c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:863,924c +3:848,909c + } + + + $query = 
\GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:925a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:929c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:932a +3:917a +====1 +1:562c + +2:935a +3:920a +====1 +1:568a +2:942c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:947c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:956c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:961,963c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +====1 +1:592,597c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:966,971c +3:951,956c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + +====1 +1:599c + +2:972a +3:957a +====1 +1:602a +2:976c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:978,990c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:992,995c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 
'application/json'] +====1 +1:616a +2:997,1049c +3:982,1034c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1050a +3:1035a +====1 +1:622c + +2:1053a +3:1038a +====1 +1:626c + +2:1056a +3:1041a +====1 +1:634a +2:1065c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1070c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1081c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1094,1096c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1099,1102c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1106c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1108c +3:1093c + +====1 +1:691a +2:1118c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1120,1132c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1134,1142c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1144,1191c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1192a +3:1177a +====1 +1:711c + +2:1195a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_VERSION.txt new file mode 100644 index 0000000000..ce107c7353 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_VERSION.txt @@ -0,0 +1,10 @@ +1,6c1 +< <<<<<<< HEAD +< 2.3.0-SNAPSHOT +< ||||||| 4479382ced +< ======= +< 2.2.3-SNAPSHOT +< >>>>>>> TEMP_RIGHT_BRANCH +--- +> 2.3.0-SNAPSHOT +\ No newline at end of file diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..c523742933 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_histogram/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,20 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:2,8c + <<<<<<< HEAD + ||||||| 4479382ced + io.swagger.codegen.languages.AspNet5ServerCodegen + ======= + io.swagger.codegen.languages.ApexClientCodegen + io.swagger.codegen.languages.AspNet5ServerCodegen + >>>>>>> TEMP_RIGHT_BRANCH +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:73c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_Models.swift.txt new file mode 100644 index 0000000000..7b6be7aabd --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_Models.swift.txt @@ -0,0 +1,2024 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! 
T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? 
String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? 
U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) 
-> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! 
ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) 
{ + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) 
+ return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) 
{ + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? 
[String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) 
{ + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) 
{ + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) 
{ + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) 
-> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) 
{ + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +==== +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,941c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + ======= + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject, instance: AnyObject?) 
-> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + >>>>>>> TEMP_RIGHT_BRANCH + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:944a +3:934a +==== +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:946,952c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in + ======= + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> OuterBoolean in + >>>>>>> TEMP_RIGHT_BRANCH +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:582c + return source +2:954,956c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:957a +3:941a +==== +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:961,972c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + ======= + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject, instance: AnyObject?) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:944a +==== +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:974,1020c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + ======= + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + >>>>>>> TEMP_RIGHT_BRANCH + + <<<<<<< HEAD + ||||||| 4479382ced + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance + ======= + result.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + result.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + result.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) 
+ return result + >>>>>>> TEMP_RIGHT_BRANCH + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +==== +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:1024c + <<<<<<< HEAD +3:973a +==== +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:1026,1037c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) + ||||||| 4479382ced + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + ======= + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject, instance: AnyObject?) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + >>>>>>> TEMP_RIGHT_BRANCH +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:1040a +3:980a +==== +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:1042,1048c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in + ======= + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> OuterNumber in + >>>>>>> TEMP_RIGHT_BRANCH +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:626c + return source +2:1050,1052c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:1053a +3:987a +==== +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:1057,1068c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + ======= + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject, instance: AnyObject?) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:990a +==== +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:1070,1076c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in + ======= + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> OuterString in + >>>>>>> TEMP_RIGHT_BRANCH +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:1078,1080c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:1081a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1084a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) 
+ result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1086,1128c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1131a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1133,1151c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) 
{ + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1154a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1156,1168c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1171a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1173,1185c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1188a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) 
+ return result +2:1190,1208c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1211a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1213,1267c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) 
{ + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) { + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1274c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_VERSION.txt new file mode 100644 index 0000000000..ce107c7353 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_VERSION.txt @@ -0,0 +1,10 @@ +1,6c1 +< <<<<<<< HEAD +< 2.3.0-SNAPSHOT +< ||||||| 4479382ced +< ======= +< 2.2.3-SNAPSHOT +< >>>>>>> TEMP_RIGHT_BRANCH +--- +> 2.3.0-SNAPSHOT +\ No newline at end of file diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..c523742933 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_ignorespace/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,20 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:2,8c + <<<<<<< HEAD + ||||||| 4479382ced + io.swagger.codegen.languages.AspNet5ServerCodegen + ======= + io.swagger.codegen.languages.ApexClientCodegen + io.swagger.codegen.languages.AspNet5ServerCodegen + >>>>>>> TEMP_RIGHT_BRANCH +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:73c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_Models.swift.txt new file mode 100644 index 0000000000..d44eaa624c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_Models.swift.txt @@ -0,0 +1,1980 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? 
{ + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) 
-> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) 
-> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) 
{ + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) -> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? 
ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) 
+ return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) 
+ result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) + return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! 
Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) 
{ + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? [String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? 
Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) 
+ result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) 
-> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? 
MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! 
Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) 
-> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) { + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) 
{ + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +==== +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:935,946c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + ======= + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject, instance: AnyObject?) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:934a +==== +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:948,954c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in + ======= + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> OuterBoolean in + >>>>>>> TEMP_RIGHT_BRANCH +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:582c + return source +2:956,958c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:959a +3:941a +==== +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:963,974c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + ======= + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject, instance: AnyObject?) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:944a +==== +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) 
+ return instance +2:976,1020c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance + ======= + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + + result.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + result.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + result.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return result + >>>>>>> TEMP_RIGHT_BRANCH +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) 
{ + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:1023a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:1025,1027c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +==== +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:1031,1042c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } + ======= + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject, instance: AnyObject?) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:980a +==== +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:1044,1050c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in + ======= + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> OuterNumber in + >>>>>>> TEMP_RIGHT_BRANCH +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:626c + return source +2:1052,1054c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:1055a +3:987a +==== +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:1059,1070c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + ======= + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject, instance: AnyObject?) 
-> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:990a +==== +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:1072,1078c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in + ======= + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> OuterString in + >>>>>>> TEMP_RIGHT_BRANCH +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:1080,1082c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:1083a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1086a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1088,1130c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) 
{ + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1133a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1135,1153c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1156a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1158,1170c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) 
-> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1173a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1175,1187c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1190a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1192,1210c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1213a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) 
+ result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1215,1269c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) 
{ + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1276c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_PetApi.php.txt new file mode 100644 index 0000000000..8a89185203 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_PetApi.php.txt @@ -0,0 +1,1662 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 
'application/xml']); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,277c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:271,272c + + +====1 +1:228a +2:281c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:283,306c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:308c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:310,311c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:313,334c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:335a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:337,359c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:362a +3:357a +====1 +1:256c + +2:365a +3:360a +====1 +1:263a +2:373c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:389c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:398,400c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:403,405c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:409c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:412c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:414,421c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:409,411c + + + +====1 +1:309a +2:425c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:427,450c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:452c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . 
$this->apiClient->getConfig()->getAccessToken(); +2:454,455c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:457,478c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:479a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:481,517c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:521c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:524a +3:514a +====1 +1:341c + +2:527a +3:517a +====1 +1:348a +2:535c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:551c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:560,562c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:565,567c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:571c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:574c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:576,583c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> 
TEMP_RIGHT_BRANCH +3:566,568c + + + +====1 +1:394a +2:587c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:589,601c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:603,614c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:616,617c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:619,640c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:641a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:643,679c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:683c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:686a +3:671a +====1 +1:426c + +2:689a +3:674a +====1 +1:433a +2:697c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:713c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:722,724c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:727,730c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:734c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:736,742c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:721,722c + + +====1 +1:480a +2:746c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:748,760c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:762,771c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:773,800c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:801a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:803,839c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:843c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:846a +3:826a +====1 +1:513c + +2:849a +3:829a +====1 +1:520a +2:857c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:862c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:872c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:881,883c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:886,890c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:901c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:903,915c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:917,928c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:930,931c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:933,954c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:955a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:957,979c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:982a +3:962a +====1 +1:592c + +2:985a +3:965a +====1 +1:601a +2:995c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:1000c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:1012c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1021,1023c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1026,1029c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1033c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1035c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1038c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1042c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====1 +1:655c + +2:1044c +3:1024c + +====1 +1:658a +2:1048c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1050,1073c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1075c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1077,1078c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1080,1101c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1102a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1104,1126c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1129a +3:1109a +====1 +1:686c + +2:1132a +3:1112a +====1 +1:695a +2:1142c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1160c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1169,1171c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1174,1177c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1181c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1183c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1186c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1190,1191c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====1 +1:755c + +2:1193c +3:1173c + +====1 +1:758a +2:1197c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1199,1211c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1213,1224c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1226,1227c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1229,1250c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1251a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1253,1289c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1293c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1296a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_StoreApi.php.txt new file mode 100644 index 0000000000..f8e1e3ce2b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_StoreApi.php.txt @@ -0,0 +1,830 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) 
+ { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,135c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:129,130c + + +====1 +1:143a +2:139c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:141,153c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:155,163c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:165,212c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:213a +3:208a +====1 +1:163c + +2:216a +3:211a +====1 +1:167c + +2:219a +3:214a +====1 +1:173a +2:226c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:241c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:246,248c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +====1 +1:197,202c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:251,256c +3:246,251c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + +====1 +1:204c + +2:257a +3:252a +====1 +1:207a +2:261c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:263,275c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:277,286c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:288,315c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:316a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:318,354c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:358c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:361a +3:356a +====1 +1:240c + +2:364a +3:359a +====1 +1:247a +2:372c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:388c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:397c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:400c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:404,406c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:409,412c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:416c +3:411c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:418,424c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:413,414c + + +====1 +1:301a +2:428c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:430,442c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:444,452c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:454,515c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . 
$resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:516a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:520c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:523a +3:513a +====1 +1:329c + +2:526a +3:516a +====1 +1:336a +2:534c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:550c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:559,561c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:564,568c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:579c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:581,593c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:595,598c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:600,666c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 
'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:667a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:671c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:674a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_UserApi.php.txt new file mode 100644 index 0000000000..49eb133815 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_UserApi.php.txt @@ -0,0 +1,1453 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param 
HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,531c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:525,526c + + +====1 +1:368a +2:535c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:537,549c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:551,559c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:561,608c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:609a +3:604a +====1 +1:388c + +2:612a +3:607a +====1 +1:392c + +2:615a +3:610a +====1 +1:399a +2:623c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:639c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:648,650c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:653,656c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:660c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:662,668c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:657,658c + + +====1 +1:446a +2:672c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:674,686c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:688,691c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:693,759c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:760a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:764c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:767a +3:757a +====1 +1:474c + +2:770a +3:760a +====1 +1:482a +2:779c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:796c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:809,811c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:814,816c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:820c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:824c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:826,833c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:816,818c + + + +====1 +1:534a +2:837c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:839,851c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:853,861c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:863,924c +3:848,909c + } + + + $query = 
\GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:925a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:929c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:932a +3:917a +====1 +1:562c + +2:935a +3:920a +====1 +1:568a +2:942c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:947c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:956c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:961,963c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +====1 +1:592,597c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:966,971c +3:951,956c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + +====1 +1:599c + +2:972a +3:957a +====1 +1:602a +2:976c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:978,990c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:992,995c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 
'application/json'] +====1 +1:616a +2:997,1049c +3:982,1034c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1050a +3:1035a +====1 +1:622c + +2:1053a +3:1038a +====1 +1:626c + +2:1056a +3:1041a +====1 +1:634a +2:1065c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1070c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1081c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1094,1096c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1099,1102c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1106c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1108c +3:1093c + +====1 +1:691a +2:1118c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1120,1132c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1134,1142c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1144,1191c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1192a +3:1177a +====1 +1:711c + +2:1195a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_VERSION.txt new file mode 100644 index 0000000000..ce107c7353 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_VERSION.txt @@ -0,0 +1,10 @@ +1,6c1 +< <<<<<<< HEAD +< 2.3.0-SNAPSHOT +< ||||||| 4479382ced +< ======= +< 2.2.3-SNAPSHOT +< >>>>>>> TEMP_RIGHT_BRANCH +--- +> 2.3.0-SNAPSHOT +\ No newline at end of file diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..c523742933 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_minimal/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,20 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:2,8c + <<<<<<< HEAD + ||||||| 4479382ced + io.swagger.codegen.languages.AspNet5ServerCodegen + ======= + io.swagger.codegen.languages.ApexClientCodegen + io.swagger.codegen.languages.AspNet5ServerCodegen + >>>>>>> TEMP_RIGHT_BRANCH +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:73c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_Models.swift.txt new file mode 100644 index 0000000000..d44eaa624c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_Models.swift.txt @@ -0,0 +1,1980 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! 
T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? 
String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? 
U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) 
-> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! 
ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) 
{ + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) 
+ return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) 
{ + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? 
[String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) 
{ + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) 
{ + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) 
{ + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) 
-> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) 
{ + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) 
{ + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +==== +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:935,946c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + ======= + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject, instance: AnyObject?) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:934a +==== +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:948,954c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in + ======= + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> OuterBoolean in + >>>>>>> TEMP_RIGHT_BRANCH +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:582c + return source +2:956,958c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:959a +3:941a +==== +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:963,974c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + ======= + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject, instance: AnyObject?) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:944a +==== +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:976,1020c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! 
OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance + ======= + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + + result.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + result.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + result.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return result + >>>>>>> TEMP_RIGHT_BRANCH +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:1023a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? 
String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:1025,1027c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +==== +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:1031,1042c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } + ======= + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject, instance: AnyObject?) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:980a +==== +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:1044,1050c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in + ======= + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> OuterNumber in + >>>>>>> TEMP_RIGHT_BRANCH +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:626c + return source +2:1052,1054c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:1055a +3:987a +==== +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:1059,1070c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + ======= + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject, instance: AnyObject?) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:990a +==== +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:1072,1078c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in + ======= + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> OuterString in + >>>>>>> TEMP_RIGHT_BRANCH +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:639c + return source +2:1080,1082c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:1083a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1086a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1088,1130c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1133a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1135,1153c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1156a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1158,1170c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1173a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1175,1187c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) 
{ + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1190a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1192,1210c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1213a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1215,1269c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) 
{ + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) { + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1276c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_PetApi.php.txt new file mode 100644 index 0000000000..8a89185203 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_PetApi.php.txt @@ -0,0 +1,1662 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() 
+ { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,277c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:271,272c + + +====1 +1:228a +2:281c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:283,306c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:308c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:310,311c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:313,334c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:335a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:337,359c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:362a +3:357a +====1 +1:256c + +2:365a +3:360a +====1 +1:263a +2:373c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:389c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:398,400c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:403,405c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:409c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:412c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:414,421c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:409,411c + + + +====1 +1:309a +2:425c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:427,450c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:452c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:454,455c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:457,478c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:479a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:481,517c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:521c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:524a +3:514a +====1 +1:341c + +2:527a +3:517a +====1 +1:348a +2:535c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:551c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:560,562c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:565,567c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:571c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:574c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:576,583c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:566,568c + + + +====1 +1:394a +2:587c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:589,601c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new 
MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:603,614c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:616,617c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:619,640c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:641a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:643,679c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:683c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:686a +3:671a +====1 +1:426c + +2:689a +3:674a +====1 +1:433a +2:697c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:713c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:722,724c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:727,730c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:734c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:736,742c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:721,722c + + +====1 +1:480a +2:746c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:748,760c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:762,771c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:773,800c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:801a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:803,839c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:843c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:846a +3:826a +====1 +1:513c + +2:849a +3:829a +====1 +1:520a +2:857c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:862c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:872c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:881,883c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:886,890c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:901c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:903,915c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:917,928c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:930,931c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:933,954c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:955a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:957,979c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:982a +3:962a +====1 +1:592c + +2:985a +3:965a +====1 +1:601a +2:995c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:1000c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:1012c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1021,1023c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1026,1029c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1033c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1035c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1038c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1042c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====1 +1:655c + +2:1044c +3:1024c + +====1 +1:658a +2:1048c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1050,1073c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1075c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1077,1078c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1080,1101c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1102a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1104,1126c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1129a +3:1109a +====1 +1:686c + +2:1132a +3:1112a +====1 +1:695a +2:1142c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1160c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1169,1171c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1174,1177c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1181c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1183c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1186c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1190,1191c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====1 +1:755c + +2:1193c +3:1173c + +====1 +1:758a +2:1197c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1199,1211c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1213,1224c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1226,1227c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1229,1250c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1251a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1253,1289c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1293c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1296a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_StoreApi.php.txt new file mode 100644 index 0000000000..f8e1e3ce2b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_StoreApi.php.txt @@ -0,0 +1,830 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + 
if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,135c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:129,130c + + +====1 +1:143a +2:139c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:141,153c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:155,163c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:165,212c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:213a +3:208a +====1 +1:163c + +2:216a +3:211a +====1 +1:167c + +2:219a +3:214a +====1 +1:173a +2:226c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:241c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:246,248c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +====1 +1:197,202c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:251,256c +3:246,251c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + +====1 +1:204c + +2:257a +3:252a +====1 +1:207a +2:261c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:263,275c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:277,286c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:288,315c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:316a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:318,354c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:358c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:361a +3:356a +====1 +1:240c + +2:364a +3:359a +====1 +1:247a +2:372c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:388c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:397c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:400c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:404,406c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:409,412c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:416c +3:411c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:418,424c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:413,414c + + +====1 +1:301a +2:428c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:430,442c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:444,452c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:454,515c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . 
$resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:516a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:520c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:523a +3:513a +====1 +1:329c + +2:526a +3:516a +====1 +1:336a +2:534c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:550c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:559,561c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:564,568c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:579c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:581,593c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:595,598c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:600,666c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 
'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:667a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:671c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:674a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_UserApi.php.txt new file mode 100644 index 0000000000..49eb133815 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_UserApi.php.txt @@ -0,0 +1,1453 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param 
HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,531c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:525,526c + + +====1 +1:368a +2:535c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:537,549c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:551,559c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:561,608c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:609a +3:604a +====1 +1:388c + +2:612a +3:607a +====1 +1:392c + +2:615a +3:610a +====1 +1:399a +2:623c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:639c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:648,650c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:653,656c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:660c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:662,668c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:657,658c + + +====1 +1:446a +2:672c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:674,686c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:688,691c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:693,759c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:760a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:764c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:767a +3:757a +====1 +1:474c + +2:770a +3:760a +====1 +1:482a +2:779c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:796c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:809,811c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:814,816c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:820c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:824c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:826,833c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:816,818c + + + +====1 +1:534a +2:837c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:839,851c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:853,861c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:863,924c +3:848,909c + } + + + $query = 
\GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:925a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:929c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:932a +3:917a +====1 +1:562c + +2:935a +3:920a +====1 +1:568a +2:942c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:947c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:956c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:961,963c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +====1 +1:592,597c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:966,971c +3:951,956c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + +====1 +1:599c + +2:972a +3:957a +====1 +1:602a +2:976c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:978,990c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:992,995c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 
'application/json'] +====1 +1:616a +2:997,1049c +3:982,1034c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1050a +3:1035a +====1 +1:622c + +2:1053a +3:1038a +====1 +1:626c + +2:1056a +3:1041a +====1 +1:634a +2:1065c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1070c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1081c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1094,1096c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1099,1102c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1106c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1108c +3:1093c + +====1 +1:691a +2:1118c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1120,1132c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1134,1142c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1144,1191c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1192a +3:1177a +====1 +1:711c + +2:1195a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_VERSION.txt new file mode 100644 index 0000000000..ce107c7353 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_VERSION.txt @@ -0,0 +1,10 @@ +1,6c1 +< <<<<<<< HEAD +< 2.3.0-SNAPSHOT +< ||||||| 4479382ced +< ======= +< 2.2.3-SNAPSHOT +< >>>>>>> TEMP_RIGHT_BRANCH +--- +> 2.3.0-SNAPSHOT +\ No newline at end of file diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..c523742933 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_myers/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,20 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:2,8c + <<<<<<< HEAD + ||||||| 4479382ced + io.swagger.codegen.languages.AspNet5ServerCodegen + ======= + io.swagger.codegen.languages.ApexClientCodegen + io.swagger.codegen.languages.AspNet5ServerCodegen + >>>>>>> TEMP_RIGHT_BRANCH +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:73c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_Models.swift.txt new file mode 100644 index 0000000000..d44eaa624c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_Models.swift.txt @@ -0,0 +1,1980 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! 
T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? 
String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? 
U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) 
-> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! 
ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) 
{ + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) 
+ return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) 
{ + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? 
[String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) 
{ + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) 
{ + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) 
{ + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) 
-> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) 
{ + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) 
{ + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +==== +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:935,946c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + ======= + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject, instance: AnyObject?) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:934a +==== +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:948,954c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in + ======= + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> OuterBoolean in + >>>>>>> TEMP_RIGHT_BRANCH +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:582c + return source +2:956,958c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:959a +3:941a +==== +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:963,974c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + ======= + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject, instance: AnyObject?) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:944a +==== +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:976,1020c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! 
OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance + ======= + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + + result.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + result.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + result.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return result + >>>>>>> TEMP_RIGHT_BRANCH +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:1023a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? 
String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:1025,1027c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +==== +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:1031,1042c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } + ======= + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject, instance: AnyObject?) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:980a +==== +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:1044,1050c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in + ======= + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> OuterNumber in + >>>>>>> TEMP_RIGHT_BRANCH +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:626c + return source +2:1052,1054c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:1055a +3:987a +==== +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:1059,1070c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + ======= + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject, instance: AnyObject?) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:990a +==== +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:1072,1078c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in + ======= + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> OuterString in + >>>>>>> TEMP_RIGHT_BRANCH +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:639c + return source +2:1080,1082c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:1083a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1086a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1088,1130c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1133a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1135,1153c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1156a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1158,1170c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1173a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1175,1187c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) 
{ + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1190a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1192,1210c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1213a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1215,1269c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) 
{ + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) { + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1276c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_PetApi.php.txt new file mode 100644 index 0000000000..8a89185203 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_PetApi.php.txt @@ -0,0 +1,1662 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function 
getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,277c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:271,272c + + +====1 +1:228a +2:281c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:283,306c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:308c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:310,311c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:313,334c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:335a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:337,359c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:362a +3:357a +====1 +1:256c + +2:365a +3:360a +====1 +1:263a +2:373c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:389c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:398,400c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:403,405c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:409c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:412c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:414,421c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:409,411c + + + +====1 +1:309a +2:425c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:427,450c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:452c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:454,455c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:457,478c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:479a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:481,517c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:521c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:524a +3:514a +====1 +1:341c + +2:527a +3:517a +====1 +1:348a +2:535c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:551c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:560,562c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:565,567c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:571c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:574c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:576,583c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:566,568c + + + +====1 +1:394a +2:587c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:589,601c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new 
MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:603,614c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:616,617c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:619,640c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:641a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:643,679c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:683c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:686a +3:671a +====1 +1:426c + +2:689a +3:674a +====1 +1:433a +2:697c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:713c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:722,724c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:727,730c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:734c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:736,742c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:721,722c + + +====1 +1:480a +2:746c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:748,760c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:762,771c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:773,800c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:801a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:803,839c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:843c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:846a +3:826a +====1 +1:513c + +2:849a +3:829a +====1 +1:520a +2:857c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:862c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:872c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:881,883c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:886,890c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:901c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:903,915c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:917,928c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:930,931c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:933,954c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:955a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:957,979c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:982a +3:962a +====1 +1:592c + +2:985a +3:965a +====1 +1:601a +2:995c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:1000c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:1012c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1021,1023c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1026,1029c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1033c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1035c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1038c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1042c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====1 +1:655c + +2:1044c +3:1024c + +====1 +1:658a +2:1048c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1050,1073c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1075c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1077,1078c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1080,1101c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1102a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1104,1126c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1129a +3:1109a +====1 +1:686c + +2:1132a +3:1112a +====1 +1:695a +2:1142c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1160c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1169,1171c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1174,1177c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1181c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1183c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1186c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1190,1191c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====1 +1:755c + +2:1193c +3:1173c + +====1 +1:758a +2:1197c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1199,1211c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1213,1224c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1226,1227c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1229,1250c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1251a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1253,1289c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1293c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1296a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_StoreApi.php.txt new file mode 100644 index 0000000000..f8e1e3ce2b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_StoreApi.php.txt @@ -0,0 +1,830 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = 
null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,135c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:129,130c + + +====1 +1:143a +2:139c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:141,153c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:155,163c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:165,212c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:213a +3:208a +====1 +1:163c + +2:216a +3:211a +====1 +1:167c + +2:219a +3:214a +====1 +1:173a +2:226c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:241c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:246,248c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +====1 +1:197,202c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:251,256c +3:246,251c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + +====1 +1:204c + +2:257a +3:252a +====1 +1:207a +2:261c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:263,275c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:277,286c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:288,315c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:316a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:318,354c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:358c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:361a +3:356a +====1 +1:240c + +2:364a +3:359a +====1 +1:247a +2:372c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:388c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:397c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:400c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:404,406c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:409,412c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:416c +3:411c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:418,424c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:413,414c + + +====1 +1:301a +2:428c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:430,442c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:444,452c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:454,515c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . 
$resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:516a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:520c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:523a +3:513a +====1 +1:329c + +2:526a +3:516a +====1 +1:336a +2:534c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:550c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:559,561c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:564,568c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:579c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:581,593c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:595,598c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:600,666c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 
'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:667a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:671c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:674a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_UserApi.php.txt new file mode 100644 index 0000000000..49eb133815 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_UserApi.php.txt @@ -0,0 +1,1453 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param 
HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,531c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:525,526c + + +====1 +1:368a +2:535c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:537,549c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:551,559c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:561,608c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:609a +3:604a +====1 +1:388c + +2:612a +3:607a +====1 +1:392c + +2:615a +3:610a +====1 +1:399a +2:623c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:639c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:648,650c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:653,656c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:660c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:662,668c + + <<<<<<< HEAD + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:657,658c + + +====1 +1:446a +2:672c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:674,686c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:688,691c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:693,759c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:760a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:764c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:767a +3:757a +====1 +1:474c + +2:770a +3:760a +====1 +1:482a +2:779c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:796c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:809,811c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:814,816c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:820c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:824c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:826,833c + + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:816,818c + + + +====1 +1:534a +2:837c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:839,851c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:853,861c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:863,924c +3:848,909c + } + + + $query = 
\GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:925a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:929c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:932a +3:917a +====1 +1:562c + +2:935a +3:920a +====1 +1:568a +2:942c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:947c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:956c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:961,963c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +====1 +1:592,597c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:966,971c +3:951,956c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + +====1 +1:599c + +2:972a +3:957a +====1 +1:602a +2:976c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:978,990c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:992,995c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 
'application/json'] +====1 +1:616a +2:997,1049c +3:982,1034c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1050a +3:1035a +====1 +1:622c + +2:1053a +3:1038a +====1 +1:626c + +2:1056a +3:1041a +====1 +1:634a +2:1065c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1070c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1081c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1094,1096c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1099,1102c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1106c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1108c +3:1093c + +====1 +1:691a +2:1118c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1120,1132c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1134,1142c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1144,1191c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1192a +3:1177a +====1 +1:711c + +2:1195a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_VERSION.txt new file mode 100644 index 0000000000..ce107c7353 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_VERSION.txt @@ -0,0 +1,10 @@ +1,6c1 +< <<<<<<< HEAD +< 2.3.0-SNAPSHOT +< ||||||| 4479382ced +< ======= +< 2.2.3-SNAPSHOT +< >>>>>>> TEMP_RIGHT_BRANCH +--- +> 2.3.0-SNAPSHOT +\ No newline at end of file diff --git a/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..c523742933 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/gitmerge_recursive_patience/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,20 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:2,8c + <<<<<<< HEAD + ||||||| 4479382ced + io.swagger.codegen.languages.AspNet5ServerCodegen + ======= + io.swagger.codegen.languages.ApexClientCodegen + io.swagger.codegen.languages.AspNet5ServerCodegen + >>>>>>> TEMP_RIGHT_BRANCH +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:73c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_Models.swift.txt new file mode 100644 index 0000000000..1deb823d65 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_Models.swift.txt @@ -0,0 +1,2452 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! 
T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? 
String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? [AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? 
U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) + result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) 
-> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! [AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! 
ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) 
{ + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) 
+ return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) -> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) 
{ + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) + return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? 
[String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +==== +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,1062c + <<<<<<< HEAD + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + ||||||| 4479382ced + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! 
[AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result + } + + + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } + // Decoder for FormatTest + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result + } + + + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } + // Decoder for HasOnlyReadOnly + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result + } + + + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) 
-> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } + // Decoder for List + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result + } + + + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } + // Decoder for MapTest + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario + } + + return result + } + + + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } + // Decoder for MixedPropertiesAndAdditionalPropertiesClass + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result + } + + + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } + // Decoder for Model200Response + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result + } + + + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } + // Decoder for Name + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! 
Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result + } + + + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } + // Decoder for NumberOnly + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result + } + + + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } + // Decoder for Order + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result + } + + + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + // Decoder for OuterBoolean + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in + if let source = source as? Bool { + return source + } + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") + } + + + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + // Decoder for OuterComposite + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) 
+ return instance + } + + + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } + // Decoder for OuterEnum + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + ======= + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result + } + + + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } + // Decoder for FormatTest + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result + } + + + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) 
-> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } + // Decoder for HasOnlyReadOnly + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result + } + + + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } + // Decoder for List + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result + } + + + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } + // Decoder for MapTest + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario + } + + return result + } + + + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } + // Decoder for MixedPropertiesAndAdditionalPropertiesClass + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result + } + + + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } + // Decoder for Model200Response + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? 
Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result + } + + + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } + // Decoder for Name + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result + } + + + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } + // Decoder for NumberOnly + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result + } + + + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } + // Decoder for Order + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result + } + + + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject, instance: AnyObject?) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } + // Decoder for OuterBoolean + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> OuterBoolean in + if let source = source as? 
Bool { + return source + } + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") + } + + + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject, instance: AnyObject?) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } + // Decoder for OuterComposite + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + + result.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + result.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + result.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return result + } + + + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } + // Decoder for OuterEnum + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + >>>>>>> TEMP_RIGHT_BRANCH + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) 
{ + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +==== +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:1066c + <<<<<<< HEAD +3:631a +==== +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) + result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:1068,1161c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) 
{ + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } + ||||||| 4479382ced + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + ======= + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject, instance: AnyObject?) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + >>>>>>> TEMP_RIGHT_BRANCH +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) 
{ + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:1164a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:1166,1184c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) 
{ + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:1187a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:1189,1201c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) -> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:1204a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:1206,1223c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:1224a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) 
-> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:1227a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:1229,1253c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) { + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:1256a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:1258,1276c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) 
{ + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:1279a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:1281,1311c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:1314a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:1316,1328c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) 
{ + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:1331a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:1333,1375c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:1378a +3:934a +====1 +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:1380c +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in +====1 +1:582c + return source +2:1382,1384c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:1385a +3:941a +====1 +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:1388a +3:944a +====1 +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:1390,1414c +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) { + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:1417a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:1419,1421c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:1424a +3:980a +==== +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:1426,1432c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in + ======= + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> OuterNumber in + >>>>>>> TEMP_RIGHT_BRANCH +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:626c + return source +2:1434,1436c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:1437a +3:987a +==== +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:1441,1452c + <<<<<<< HEAD + ||||||| 4479382ced + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + ======= + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject, instance: AnyObject?) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } + >>>>>>> TEMP_RIGHT_BRANCH +3:990a +==== +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:1454,1460c + <<<<<<< HEAD + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + ||||||| 4479382ced + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in + ======= + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> OuterString in + >>>>>>> TEMP_RIGHT_BRANCH +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:1462,1464c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:1465a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1468a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? 
String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1470,1512c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1515a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1517,1535c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) 
-> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1538a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1540,1552c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1555a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1557,1569c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1572a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1574,1592c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) 
{ + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1595a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1597,1651c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) { + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) 
{ + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) { + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1658c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_PetApi.php.txt new file mode 100644 index 0000000000..3aec4a7f5f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_PetApi.php.txt @@ -0,0 +1,1666 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if 
(!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,278c + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:271,272c + + +====1 +1:228a +2:282c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:284,307c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:309c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:311,312c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:314,335c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:336a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:338,360c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:363a +3:357a +====1 +1:256c + +2:366a +3:360a +====1 +1:263a +2:374c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:390c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:399,401c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:404,406c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:410c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:413c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:415,423c + <<<<<<< HEAD + + + + ||||||| 4479382ced + + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:409,411c + + + +====1 +1:309a +2:427c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:429,452c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:454c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . 
$this->apiClient->getConfig()->getAccessToken(); +2:456,457c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:459,480c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:481a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:483,519c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:523c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:526a +3:514a +====1 +1:341c + +2:529a +3:517a +====1 +1:348a +2:537c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:553c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:562,564c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:567,569c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:573c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:576c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:578,586c + <<<<<<< HEAD + + + + ||||||| 4479382ced + + ======= + + >>>>>>> 
TEMP_RIGHT_BRANCH +3:566,568c + + + +====1 +1:394a +2:590c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:592,604c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:606,617c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:619,620c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:622,643c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:644a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:646,682c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:686c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:689a +3:671a +====1 +1:426c + +2:692a +3:674a +====1 +1:433a +2:700c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:716c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:725,727c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:730,733c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:737c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:739,746c + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:721,722c + + +====1 +1:480a +2:750c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:752,764c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:766,775c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:777,804c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:805a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:807,843c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:847c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:850a +3:826a +====1 +1:513c + +2:853a +3:829a +====1 +1:520a +2:861c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:866c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:876c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:885,887c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:890,894c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:905c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:907,919c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:921,932c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:934,935c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:937,958c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:959a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:961,983c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:986a +3:962a +====1 +1:592c + +2:989a +3:965a +====1 +1:601a +2:999c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:1004c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:1016c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1025,1027c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1030,1033c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1037c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1039c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1042c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1046c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====1 +1:655c + +2:1048c +3:1024c + +====1 +1:658a +2:1052c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1054,1077c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1079c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1081,1082c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1084,1105c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1106a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1108,1130c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1133a +3:1109a +====1 +1:686c + +2:1136a +3:1112a +====1 +1:695a +2:1146c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1164c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1173,1175c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1178,1181c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1185c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1187c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1190c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1194,1195c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====1 +1:755c + +2:1197c +3:1173c + +====1 +1:758a +2:1201c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1203,1215c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1217,1228c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1230,1231c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1233,1254c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1255a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1257,1293c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1297c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1300a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_StoreApi.php.txt new file mode 100644 index 0000000000..bad1516e28 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_StoreApi.php.txt @@ -0,0 +1,832 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + 
$apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,136c + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:129,130c + + +====1 +1:143a +2:140c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:142,154c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:156,164c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:166,213c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:214a +3:208a +====1 +1:163c + +2:217a +3:211a +====1 +1:167c + +2:220a +3:214a +====1 +1:173a +2:227c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:242c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:247,249c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +====1 +1:197,202c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,257c +3:246,251c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + +====1 +1:204c + +2:258a +3:252a +====1 +1:207a +2:262c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:264,276c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:278,287c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:289,316c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:317a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:319,355c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:359c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:362a +3:356a +====1 +1:240c + +2:365a +3:359a +====1 +1:247a +2:373c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:389c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:398c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:401c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:405,407c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:410,413c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:417c +3:411c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:419,426c + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:413,414c + + +====1 +1:301a +2:430c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:432,444c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:446,454c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:456,517c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . 
$resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:518a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:522c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:525a +3:513a +====1 +1:329c + +2:528a +3:516a +====1 +1:336a +2:536c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:552c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:561,563c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:566,570c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:581c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:583,595c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:597,600c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:602,668c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 
'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:669a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:673c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:676a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_UserApi.php.txt new file mode 100644 index 0000000000..9ca82964d7 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_UserApi.php.txt @@ -0,0 +1,1456 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + 
public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,532c + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:525,526c + + +====1 +1:368a +2:536c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:538,550c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:552,560c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:562,609c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:610a +3:604a +====1 +1:388c + +2:613a +3:607a +====1 +1:392c + +2:616a +3:610a +====1 +1:399a +2:624c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:640c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:649,651c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:654,657c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:661c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:663,670c + <<<<<<< HEAD + + + ||||||| 4479382ced + + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:657,658c + + +====1 +1:446a +2:674c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:676,688c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:690,693c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:695,761c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:762a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:766c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:769a +3:757a +====1 +1:474c + +2:772a +3:760a +====1 +1:482a +2:781c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:798c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:811,813c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:816,818c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:822c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:826c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:828,836c + <<<<<<< HEAD + + + + ||||||| 4479382ced + + ======= + + >>>>>>> TEMP_RIGHT_BRANCH +3:816,818c + + + +====1 +1:534a +2:840c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:842,854c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:856,864c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:866,927c +3:848,909c + } + + + $query = 
\GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:928a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:932c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:935a +3:917a +====1 +1:562c + +2:938a +3:920a +====1 +1:568a +2:945c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:950c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:959c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:964,966c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +====1 +1:592,597c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:969,974c +3:951,956c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + +====1 +1:599c + +2:975a +3:957a +====1 +1:602a +2:979c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:981,993c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:995,998c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 
'application/json'] +====1 +1:616a +2:1000,1052c +3:982,1034c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1053a +3:1035a +====1 +1:622c + +2:1056a +3:1038a +====1 +1:626c + +2:1059a +3:1041a +====1 +1:634a +2:1068c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1073c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1084c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1097,1099c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1102,1105c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1109c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1111c +3:1093c + +====1 +1:691a +2:1121c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1123,1135c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1137,1145c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1147,1194c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1195a +3:1177a +====1 +1:711c + +2:1198a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_VERSION.txt new file mode 100644 index 0000000000..ce107c7353 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_VERSION.txt @@ -0,0 +1,10 @@ +1,6c1 +< <<<<<<< HEAD +< 2.3.0-SNAPSHOT +< ||||||| 4479382ced +< ======= +< 2.2.3-SNAPSHOT +< >>>>>>> TEMP_RIGHT_BRANCH +--- +> 2.3.0-SNAPSHOT +\ No newline at end of file diff --git a/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..c523742933 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/intellimerge/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,20 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:2,8c + <<<<<<< HEAD + ||||||| 4479382ced + io.swagger.codegen.languages.AspNet5ServerCodegen + ======= + io.swagger.codegen.languages.ApexClientCodegen + io.swagger.codegen.languages.AspNet5ServerCodegen + >>>>>>> TEMP_RIGHT_BRANCH +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:73c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1897/spork/diff_DefaultCodegen.java.txt b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_DefaultCodegen.java.txt new file mode 100644 index 0000000000..f9c6d593c4 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_DefaultCodegen.java.txt @@ -0,0 +1,48 @@ +====3 +1:104a +2:104a +3:105c + protected boolean removeOperationIdPrefix; +====3 +1:162a +2:162a +3:164,168c + + if (additionalProperties.containsKey(CodegenConstants.REMOVE_OPERATION_ID_PREFIX)) { + this.setSortParamsByRequiredFlag(Boolean.valueOf(additionalProperties + .get(CodegenConstants.REMOVE_OPERATION_ID_PREFIX).toString())); + } +====3 +1:852c +2:852c + //name formatting options +3:858c + // name formatting options +====3 +1:1984a +2:1984a +3:1991,1997c + // remove prefix in operationId + if (removeOperationIdPrefix) { + int offset = operationId.indexOf('_'); + if (offset > -1) { + operationId = operationId.substring(offset+1); + } + } +====1 +1:1991c + op.tags = operation.getTags(); +2:1990a +3:2003a +====3 +1:3197a +2:3196a +3:3210,3217c + public boolean isRemoveOperationIdPrefix() { + return removeOperationIdPrefix; + } + + public void setRemoveOperationIdPrefix(boolean removeOperationIdPrefix) { + this.removeOperationIdPrefix = removeOperationIdPrefix; + } + diff --git a/src/python/merge_conflict_analysis_diffs/1897/spork/diff_Models.swift.txt b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_Models.swift.txt new file mode 100644 index 0000000000..8a5c872118 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_Models.swift.txt @@ -0,0 +1,1866 @@ +====1 +1:14c + case Error(Int, Data?, Error) +2:14,15c +3:14,15c + case HttpError(statusCode: Int, data: Data?, error: Error) + case DecodeError(response: Data?, decodeError: DecodeError) +====1 +1:37a +2:39,60c +3:39,60c + public enum Decoded { + case success(ValueType) + case failure(DecodeError) + } + + public extension Decoded { + var value: ValueType? { + switch self { + case let .success(value): + return value + case .failure: + return nil + } + } + } + + public enum DecodeError { + case typeMismatch(expected: String, actual: String) + case missingKey(key: String) + case parseError(message: String) + } + +====1 +1:42c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> T)) { +2:65c +3:65c + static func addDecoder(clazz: T.Type, decoder: @escaping ((AnyObject, AnyObject?) -> Decoded)) { +====1 +1:47,50c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> T { + let key = discriminator; + if let decoder = decoders[key] { + return decoder(source, nil) as! T +2:70,73c +3:70,73c + static func decode(clazz: T.Type, discriminator: String, source: AnyObject) -> Decoded { + let key = discriminator + if let decoder = decoders[key], let value = decoder(source, nil) as? Decoded { + return value +====1 +1:52c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:75c +3:75c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:56,58c + static func decode(clazz: [T].Type, source: AnyObject) -> [T] { + let array = source as! [AnyObject] + return array.map { Decoders.decode(clazz: T.self, source: $0, instance: nil) } +2:79,93c +3:79,93c + static func decode(clazz: [T].Type, source: AnyObject) -> Decoded<[T]> { + if let sourceArray = source as? [AnyObject] { + var values = [T]() + for sourceValue in sourceArray { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): + values.append(value) + case let .failure(error): + return .failure(error) + } + } + return .success(values) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } +====1 +1:61,65c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> [Key:T] { + let sourceDictionary = source as! [Key: AnyObject] + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + dictionary[key] = Decoders.decode(clazz: T.self, source: value, instance: nil) +2:96,122c +3:96,122c + static func decode(clazz: [Key:T].Type, source: AnyObject) -> Decoded<[Key:T]> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): + dictionary[key] = value + case let .failure(error): + return .failure(error) + } + } + return .success(dictionary) + } else { + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) + } + } + + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + guard !(source is NSNull), source != nil else { return .success(nil) } + if let value = source as? 
T.RawValue { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "\(T.RawValue.self) matching a case from the enumeration \(T.self)", actual: String(describing: type(of: source)))) +====1 +1:67c + return dictionary +2:123a +3:123a +====1 +1:70c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> T { +2:126c +3:126c + static func decode(clazz: T.Type, source: AnyObject, instance: AnyObject?) -> Decoded { +====1 +1:72,73c + if T.self is Int32.Type && source is NSNumber { + return (source as! NSNumber).int32Value as! T; +2:128,129c +3:128,129c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int32.Type { + return .success(value) +====1 +1:75,76c + if T.self is Int64.Type && source is NSNumber { + return source.int64Value as! T; +2:131,132c +3:131,132c + if let value = source.int32Value as? T, source is NSNumber, T.self is Int64.Type { + return .success(value) +====1 +1:78,79c + if T.self is UUID.Type && source is String { + return UUID(uuidString: source as! String) as! T +2:134,135c +3:134,135c + if let intermediate = source as? String, let value = UUID(uuidString: intermediate) as? T, source is String, T.self is UUID.Type { + return .success(value) +====1 +1:81,82c + if source is T { + return source as! T +2:137,138c +3:137,138c + if let value = source as? T { + return .success(value) +====1 +1:84,85c + if T.self is Data.Type && source is String { + return Data(base64Encoded: source as! String) as! T +2:140,141c +3:140,141c + if let intermediate = source as? String, let value = Data(base64Encoded: intermediate) as? T { + return .success(value) +====1 +1:89,90c + if let decoder = decoders[key] { + return decoder(source, instance) as! T +2:145,146c +3:145,146c + if let decoder = decoders[key], let value = decoder(source, instance) as? Decoded { + return value +====1 +1:92c + fatalError("Source \(source) is not convertible to type \(clazz): Maybe swagger file is insufficient") +2:148c +3:148c + return .failure(.typeMismatch(expected: String(describing: clazz), actual: String(describing: source))) +====1 +1:96,102c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> T? { + if source is NSNull { + return nil + } + return source.map { (source: AnyObject) -> T in + Decoders.decode(clazz: clazz, source: source, instance: nil) + } +2:152,154c +3:152,154c + //Convert a Decoded so that its value is optional. DO WE STILL NEED THIS? + static func toOptional(decoded: Decoded) -> Decoded { + return .success(decoded.value) +====1 +1:105,107c + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> [T]? { + if source is NSNull { + return nil +2:157,164c +3:157,164c + static func decodeOptional(clazz: T.Type, source: AnyObject?) -> Decoded { + if let source = source, !(source is NSNull) { + switch Decoders.decode(clazz: clazz, source: source, instance: nil) { + case let .success(value): return .success(value) + case let .failure(error): return .failure(error) + } + } else { + return .success(nil) +====1 +1:109,110c + return source.map { (someSource: AnyObject) -> [T] in + Decoders.decode(clazz: clazz, source: someSource) +2:166,179c +3:166,179c + } + + static func decodeOptional(clazz: [T].Type, source: AnyObject?) -> Decoded<[T]?> { + if let source = source as? 
[AnyObject] { + var values = [T]() + for sourceValue in source { + switch Decoders.decode(clazz: T.self, source: sourceValue, instance: nil) { + case let .success(value): values.append(value) + case let .failure(error): return .failure(error) + } + } + return .success(values) + } else { + return .success(nil) +====1 +1:114,116c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> [Key:T]? { + if source is NSNull { + return nil +2:183,194c +3:183,194c + static func decodeOptional(clazz: [Key:T].Type, source: AnyObject?) -> Decoded<[Key:T]?> { + if let sourceDictionary = source as? [Key: AnyObject] { + var dictionary = [Key:T]() + for (key, value) in sourceDictionary { + switch Decoders.decode(clazz: T.self, source: value, instance: nil) { + case let .success(value): dictionary[key] = value + case let .failure(error): return .failure(error) + } + } + return .success(dictionary) + } else { + return .success(nil) +====1 +1:118,119c + return source.map { (someSource: AnyObject) -> [Key:T] in + Decoders.decode(clazz: clazz, source: someSource) +2:196,206c +3:196,206c + } + + static func decodeOptional(clazz: T, source: AnyObject) -> Decoded { + if let value = source as? U { + if let enumValue = T.init(rawValue: value) { + return .success(enumValue) + } else { + return .failure(.typeMismatch(expected: "A value from the enumeration \(T.self)", actual: "\(value)")) + } + } else { + return .failure(.typeMismatch(expected: "String", actual: String(describing: type(of: source)))) +====1 +1:122a +2:210c +3:210c + +====1 +1:137c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Date in +2:225c +3:225c + Decoders.addDecoder(clazz: Date.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:141c + return date +2:229c +3:229c + return .success(date) +====1 +1:145c + if let sourceInt = source as? Int64 { +2:233c +3:233c + if let sourceInt = source as? Int { +====1 +1:147c + return Date(timeIntervalSince1970: Double(sourceInt / 1000) ) +2:235,250c +3:235,250c + return .success(Date(timeIntervalSince1970: Double(sourceInt / 1000) )) + } + if source is String || source is Int { + return .failure(.parseError(message: "Could not decode date")) + } else { + return .failure(.typeMismatch(expected: "String or Int", actual: "\(source)")) + } + } + + // Decoder for ISOFullDate + Decoders.addDecoder(clazz: ISOFullDate.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let string = source as? String, + let isoDate = ISOFullDate.from(string: string) { + return .success(isoDate) + } else { + return .failure(.typeMismatch(expected: "ISO date", actual: "\(source)")) +====1 +1:149c + fatalError("formatter failed to parse \(source)") +2:251a +3:251a +====1 +1:152,155c + // Decoder for [AdditionalPropertiesClass] + Decoders.addDecoder(clazz: [AdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [AdditionalPropertiesClass] in + return Decoders.decode(clazz: [AdditionalPropertiesClass].self, source: source) + } +2:253a +3:253a +====1 +1:157,163c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> AdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + + result.mapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_property"] as AnyObject?) 
+ result.mapOfMapProperty = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_of_map_property"] as AnyObject?) + return result +2:255,273c +3:255,273c + Decoders.addDecoder(clazz: AdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? AdditionalPropertiesClass() : instance as! AdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: [String:String].self, source: sourceDictionary["map_property"] as AnyObject?) { + + case let .success(value): result.mapProperty = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_of_map_property"] as AnyObject?) { + + case let .success(value): result.mapOfMapProperty = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "AdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:167,170c + // Decoder for [Animal] + Decoders.addDecoder(clazz: [Animal].self) { (source: AnyObject, instance: AnyObject?) -> [Animal] in + return Decoders.decode(clazz: [Animal].self, source: source) + } +2:276a +3:276a +====1 +1:172,176c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Animal in + let sourceDictionary = source as! [AnyHashable: Any] + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal" { + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) +2:278,299c +3:278,299c + Decoders.addDecoder(clazz: Animal.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + // Check discriminator to support inheritance + if let discriminator = sourceDictionary["className"] as? String, instance == nil && discriminator != "Animal"{ + return Decoders.decode(clazz: Animal.self, discriminator: discriminator, source: source) + } + let result = instance == nil ? Animal() : instance as! Animal + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Animal", actual: "\(source)")) +====1 +1:178,182c + let result = instance == nil ? Animal() : instance as! Animal + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + return result +2:300a +3:300a +====1 +1:186,194c + // Decoder for [AnimalFarm] + Decoders.addDecoder(clazz: [AnimalFarm].self) { (source: AnyObject, instance: AnyObject?) -> [AnimalFarm] in + return Decoders.decode(clazz: [AnimalFarm].self, source: source) + } + // Decoder for AnimalFarm + Decoders.addDecoder(clazz: AnimalFarm.self) { (source: AnyObject, instance: AnyObject?) -> AnimalFarm in + let sourceArray = source as! 
[AnyObject] + return sourceArray.map({ Decoders.decode(clazz: Animal.self, source: $0, instance: nil) }) + } +2:303a +3:303a +====1 +1:197,200c + // Decoder for [ApiResponse] + Decoders.addDecoder(clazz: [ApiResponse].self) { (source: AnyObject, instance: AnyObject?) -> [ApiResponse] in + return Decoders.decode(clazz: [ApiResponse].self, source: source) + } +2:306c +3:306c + +====1 +1:202,209c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> ApiResponse in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + + result.code = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) + result.type = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) + result.message = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) + return result +2:308,332c +3:308,332c + Decoders.addDecoder(clazz: ApiResponse.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ApiResponse() : instance as! ApiResponse + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["code"] as AnyObject?) { + + case let .success(value): result.code = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["type"] as AnyObject?) { + + case let .success(value): result.type = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["message"] as AnyObject?) { + + case let .success(value): result.message = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ApiResponse", actual: "\(source)")) + } +====1 +1:213,216c + // Decoder for [ArrayOfArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfArrayOfNumberOnly].self, source: source) + } +2:335a +3:335a +====1 +1:218,223c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + + result.arrayArrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) + return result +2:337,349c +3:337,349c + Decoders.addDecoder(clazz: ArrayOfArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfArrayOfNumberOnly() : instance as! ArrayOfArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [[Double]].self, source: sourceDictionary["ArrayArrayNumber"] as AnyObject?) 
{ + + case let .success(value): result.arrayArrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:227,230c + // Decoder for [ArrayOfNumberOnly] + Decoders.addDecoder(clazz: [ArrayOfNumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayOfNumberOnly] in + return Decoders.decode(clazz: [ArrayOfNumberOnly].self, source: source) + } +2:352a +3:352a +====1 +1:232,237c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> ArrayOfNumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + + result.arrayNumber = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["ArrayNumber"] as AnyObject?) + return result +2:354,366c +3:354,366c + Decoders.addDecoder(clazz: ArrayOfNumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayOfNumberOnly() : instance as! ArrayOfNumberOnly + switch Decoders.decodeOptional(clazz: [Double].self, source: sourceDictionary["ArrayNumber"] as AnyObject?) { + + case let .success(value): result.arrayNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayOfNumberOnly", actual: "\(source)")) + } +====1 +1:241,244c + // Decoder for [ArrayTest] + Decoders.addDecoder(clazz: [ArrayTest].self) { (source: AnyObject, instance: AnyObject?) -> [ArrayTest] in + return Decoders.decode(clazz: [ArrayTest].self, source: source) + } +2:369a +3:369a +====1 +1:246,253c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> ArrayTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + + result.arrayOfString = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_of_string"] as AnyObject?) + result.arrayArrayOfInteger = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) + result.arrayArrayOfModel = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_array_of_model"] as AnyObject?) + return result +2:371,395c +3:371,395c + Decoders.addDecoder(clazz: ArrayTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ArrayTest() : instance as! ArrayTest + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["array_of_string"] as AnyObject?) { + + case let .success(value): result.arrayOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[Int64]].self, source: sourceDictionary["array_array_of_integer"] as AnyObject?) { + + case let .success(value): result.arrayArrayOfInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [[ReadOnlyFirst]].self, source: sourceDictionary["array_array_of_model"] as AnyObject?) 
{ + + case let .success(value): result.arrayArrayOfModel = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ArrayTest", actual: "\(source)")) + } +====1 +1:257,260c + // Decoder for [Capitalization] + Decoders.addDecoder(clazz: [Capitalization].self) { (source: AnyObject, instance: AnyObject?) -> [Capitalization] in + return Decoders.decode(clazz: [Capitalization].self, source: source) + } +2:398a +3:398a +====1 +1:262,272c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Capitalization in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Capitalization() : instance as! Capitalization + + result.smallCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) + result.capitalCamel = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) + result.smallSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) + result.capitalSnake = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) + result.sCAETHFlowPoints = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) + result.ATT_NAME = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) + return result +2:400,442c +3:400,442c + Decoders.addDecoder(clazz: Capitalization.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Capitalization() : instance as! Capitalization + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["smallCamel"] as AnyObject?) { + + case let .success(value): result.smallCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["CapitalCamel"] as AnyObject?) { + + case let .success(value): result.capitalCamel = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["small_Snake"] as AnyObject?) { + + case let .success(value): result.smallSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["Capital_Snake"] as AnyObject?) { + + case let .success(value): result.capitalSnake = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["SCA_ETH_Flow_Points"] as AnyObject?) { + + case let .success(value): result.sCAETHFlowPoints = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["ATT_NAME"] as AnyObject?) { + + case let .success(value): result.ATT_NAME = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Capitalization", actual: "\(source)")) + } +====1 +1:276,279c + // Decoder for [Cat] + Decoders.addDecoder(clazz: [Cat].self) { (source: AnyObject, instance: AnyObject?) 
-> [Cat] in + return Decoders.decode(clazz: [Cat].self, source: source) + } +2:445a +3:445a +====1 +1:281,291c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Cat in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.declawed = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) + return result +2:447,474c +3:447,474c + Decoders.addDecoder(clazz: Cat.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Cat() : instance as! Cat + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["declawed"] as AnyObject?) { + + case let .success(value): result.declawed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Cat", actual: "\(source)")) + } +====1 +1:295,298c + // Decoder for [Category] + Decoders.addDecoder(clazz: [Category].self) { (source: AnyObject, instance: AnyObject?) -> [Category] in + return Decoders.decode(clazz: [Category].self, source: source) + } +2:477a +3:477a +====1 +1:300,306c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Category in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Category() : instance as! Category + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:479,497c +3:479,497c + Decoders.addDecoder(clazz: Category.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Category() : instance as! Category + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) 
{ + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Category", actual: "\(source)")) + } +====1 +1:310,313c + // Decoder for [ClassModel] + Decoders.addDecoder(clazz: [ClassModel].self) { (source: AnyObject, instance: AnyObject?) -> [ClassModel] in + return Decoders.decode(clazz: [ClassModel].self, source: source) + } +2:500a +3:500a +====1 +1:315,320c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> ClassModel in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ClassModel() : instance as! ClassModel + + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) + return result +2:502,514c +3:502,514c + Decoders.addDecoder(clazz: ClassModel.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ClassModel() : instance as! ClassModel + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["_class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ClassModel", actual: "\(source)")) + } +====1 +1:324,327c + // Decoder for [Client] + Decoders.addDecoder(clazz: [Client].self) { (source: AnyObject, instance: AnyObject?) -> [Client] in + return Decoders.decode(clazz: [Client].self, source: source) + } +2:517a +3:517a +====1 +1:329,334c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Client in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Client() : instance as! Client + + result.client = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) + return result +2:519,531c +3:519,531c + Decoders.addDecoder(clazz: Client.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Client() : instance as! Client + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["client"] as AnyObject?) { + + case let .success(value): result.client = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Client", actual: "\(source)")) + } +====1 +1:338,341c + // Decoder for [Dog] + Decoders.addDecoder(clazz: [Dog].self) { (source: AnyObject, instance: AnyObject?) -> [Dog] in + return Decoders.decode(clazz: [Dog].self, source: source) + } +2:534a +3:534a +====1 +1:343,353c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Dog in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + + result.className = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) + result.color = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) + result.breed = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) 
+ return result +2:536,563c +3:536,563c + Decoders.addDecoder(clazz: Dog.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Dog() : instance as! Dog + if decoders["\(Animal.self)"] != nil { + _ = Decoders.decode(clazz: Animal.self, source: source, instance: result) + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["className"] as AnyObject?) { + + case let .success(value): result.className = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["color"] as AnyObject?) { + + case let .success(value): result.color = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["breed"] as AnyObject?) { + + case let .success(value): result.breed = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Dog", actual: "\(source)")) + } +====1 +1:357,360c + // Decoder for [EnumArrays] + Decoders.addDecoder(clazz: [EnumArrays].self) { (source: AnyObject, instance: AnyObject?) -> [EnumArrays] in + return Decoders.decode(clazz: [EnumArrays].self, source: source) + } +2:566a +3:566a +====1 +1:362,371c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> EnumArrays in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + + if let justSymbol = sourceDictionary["just_symbol"] as? String { + result.justSymbol = EnumArrays.JustSymbol(rawValue: (justSymbol)) + } + + if let arrayEnum = sourceDictionary["array_enum"] as? [String] { + result.arrayEnum = arrayEnum.map ({ EnumArrays.ArrayEnum(rawValue: $0)! }) +2:568,585c +3:568,585c + Decoders.addDecoder(clazz: EnumArrays.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumArrays() : instance as! EnumArrays + switch Decoders.decodeOptional(clazz: EnumArrays.JustSymbol.self, source: sourceDictionary["just_symbol"] as AnyObject?) { + + case let .success(value): result.justSymbol = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["array_enum"] as AnyObject?) { + + case let .success(value): result.arrayEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumArrays", actual: "\(source)")) +====1 +1:373,374c + + return result +2:586a +3:586a +====1 +1:378,381c + // Decoder for [EnumClass] + Decoders.addDecoder(clazz: [EnumClass].self) { (source: AnyObject, instance: AnyObject?) -> [EnumClass] in + return Decoders.decode(clazz: [EnumClass].self, source: source) + } +2:589a +3:589a +====1 +1:383,389c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) -> EnumClass in + if let source = source as? String { + if let result = EnumClass(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type EnumClass: Maybe swagger file is insufficient") +2:591,593c +3:591,593c + Decoders.addDecoder(clazz: EnumClass.self) { (source: AnyObject, instance: AnyObject?) 
-> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: EnumClass.self, source: source, instance: instance) +====1 +1:393,396c + // Decoder for [EnumTest] + Decoders.addDecoder(clazz: [EnumTest].self) { (source: AnyObject, instance: AnyObject?) -> [EnumTest] in + return Decoders.decode(clazz: [EnumTest].self, source: source) + } +2:596a +3:596a +====1 +1:398,415c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> EnumTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? EnumTest() : instance as! EnumTest + + if let enumString = sourceDictionary["enum_string"] as? String { + result.enumString = EnumTest.EnumString(rawValue: (enumString)) + } + + if let enumInteger = sourceDictionary["enum_integer"] as? Int32 { + result.enumInteger = EnumTest.EnumInteger(rawValue: (enumInteger)) + } + + if let enumNumber = sourceDictionary["enum_number"] as? Double { + result.enumNumber = EnumTest.EnumNumber(rawValue: (enumNumber)) + } + + result.outerEnum = Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) + return result +2:598,628c +3:598,628c + Decoders.addDecoder(clazz: EnumTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? EnumTest() : instance as! EnumTest + switch Decoders.decodeOptional(clazz: EnumTest.EnumString.self, source: sourceDictionary["enum_string"] as AnyObject?) { + + case let .success(value): result.enumString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumInteger.self, source: sourceDictionary["enum_integer"] as AnyObject?) { + + case let .success(value): result.enumInteger = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: EnumTest.EnumNumber.self, source: sourceDictionary["enum_number"] as AnyObject?) { + + case let .success(value): result.enumNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterEnum.self, source: sourceDictionary["outerEnum"] as AnyObject?) { + + case let .success(value): result.outerEnum = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "EnumTest", actual: "\(source)")) + } +====1 +1:419,422c + // Decoder for [FormatTest] + Decoders.addDecoder(clazz: [FormatTest].self) { (source: AnyObject, instance: AnyObject?) -> [FormatTest] in + return Decoders.decode(clazz: [FormatTest].self, source: source) + } +2:631a +3:631a +====1 +1:424,441c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> FormatTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? FormatTest() : instance as! FormatTest + + result.integer = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) + result.int32 = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) + result.int64 = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) + result.number = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) + result.float = Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) 
+ result.double = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) + result.string = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) + result.byte = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) + result.binary = Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) + result.date = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["date"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + return result +2:633,717c +3:633,717c + Decoders.addDecoder(clazz: FormatTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? FormatTest() : instance as! FormatTest + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["integer"] as AnyObject?) { + + case let .success(value): result.integer = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["int32"] as AnyObject?) { + + case let .success(value): result.int32 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["int64"] as AnyObject?) { + + case let .success(value): result.int64 = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["number"] as AnyObject?) { + + case let .success(value): result.number = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Float.self, source: sourceDictionary["float"] as AnyObject?) { + + case let .success(value): result.float = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["double"] as AnyObject?) { + + case let .success(value): result.double = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["string"] as AnyObject?) { + + case let .success(value): result.string = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["byte"] as AnyObject?) { + + case let .success(value): result.byte = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Data.self, source: sourceDictionary["binary"] as AnyObject?) { + + case let .success(value): result.binary = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: ISOFullDate.self, source: sourceDictionary["date"] as AnyObject?) { + + case let .success(value): result.date = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) 
{ + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "FormatTest", actual: "\(source)")) + } +====1 +1:445,448c + // Decoder for [HasOnlyReadOnly] + Decoders.addDecoder(clazz: [HasOnlyReadOnly].self) { (source: AnyObject, instance: AnyObject?) -> [HasOnlyReadOnly] in + return Decoders.decode(clazz: [HasOnlyReadOnly].self, source: source) + } +2:720a +3:720a +====1 +1:450,456c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> HasOnlyReadOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.foo = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) + return result +2:722,740c +3:722,740c + Decoders.addDecoder(clazz: HasOnlyReadOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? HasOnlyReadOnly() : instance as! HasOnlyReadOnly + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) { + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["foo"] as AnyObject?) { + + case let .success(value): result.foo = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "HasOnlyReadOnly", actual: "\(source)")) + } +====1 +1:460,463c + // Decoder for [List] + Decoders.addDecoder(clazz: [List].self) { (source: AnyObject, instance: AnyObject?) -> [List] in + return Decoders.decode(clazz: [List].self, source: source) + } +2:743a +3:743a +====1 +1:465,470c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> List in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? List() : instance as! List + + result._123List = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) + return result +2:745,757c +3:745,757c + Decoders.addDecoder(clazz: List.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? List() : instance as! List + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["123-list"] as AnyObject?) { + + case let .success(value): result._123List = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "List", actual: "\(source)")) + } +====1 +1:474,477c + // Decoder for [MapTest] + Decoders.addDecoder(clazz: [MapTest].self) { (source: AnyObject, instance: AnyObject?) 
-> [MapTest] in + return Decoders.decode(clazz: [MapTest].self, source: source) + } +2:760a +3:760a +====1 +1:479,484c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> MapTest in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MapTest() : instance as! MapTest + + result.mapMapOfString = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map_map_of_string"] as AnyObject?) + if let mapOfEnumString = sourceDictionary["map_of_enum_string"] as? [String:String] { //TODO: handle enum map scenario +2:762,779c +3:762,779c + Decoders.addDecoder(clazz: MapTest.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MapTest() : instance as! MapTest + switch Decoders.decodeOptional(clazz: [String:[String:String]].self, source: sourceDictionary["map_map_of_string"] as AnyObject?) { + + case let .success(value): result.mapMapOfString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: MapTest.MapOfEnumString.self, source: sourceDictionary["map_of_enum_string"] as AnyObject?) { + /* + case let .success(value): result.mapOfEnumString = value + case let .failure(error): return .failure(error) + */ default: break //TODO: handle enum map scenario + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MapTest", actual: "\(source)")) +====1 +1:486,487c + + return result +2:780a +3:780a +====1 +1:491,494c + // Decoder for [MixedPropertiesAndAdditionalPropertiesClass] + Decoders.addDecoder(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self) { (source: AnyObject, instance: AnyObject?) -> [MixedPropertiesAndAdditionalPropertiesClass] in + return Decoders.decode(clazz: [MixedPropertiesAndAdditionalPropertiesClass].self, source: source) + } +2:783a +3:783a +====1 +1:496,503c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> MixedPropertiesAndAdditionalPropertiesClass in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + + result.uuid = Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) + result.dateTime = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) + result.map = Decoders.decodeOptional(clazz: Dictionary.self, source: sourceDictionary["map"] as AnyObject?) + return result +2:785,809c +3:785,809c + Decoders.addDecoder(clazz: MixedPropertiesAndAdditionalPropertiesClass.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? MixedPropertiesAndAdditionalPropertiesClass() : instance as! MixedPropertiesAndAdditionalPropertiesClass + switch Decoders.decodeOptional(clazz: UUID.self, source: sourceDictionary["uuid"] as AnyObject?) { + + case let .success(value): result.uuid = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["dateTime"] as AnyObject?) 
{ + + case let .success(value): result.dateTime = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String:Animal].self, source: sourceDictionary["map"] as AnyObject?) { + + case let .success(value): result.map = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "MixedPropertiesAndAdditionalPropertiesClass", actual: "\(source)")) + } +====1 +1:507,510c + // Decoder for [Model200Response] + Decoders.addDecoder(clazz: [Model200Response].self) { (source: AnyObject, instance: AnyObject?) -> [Model200Response] in + return Decoders.decode(clazz: [Model200Response].self, source: source) + } +2:812a +3:812a +====1 +1:512,518c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Model200Response in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Model200Response() : instance as! Model200Response + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result._class = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) + return result +2:814,832c +3:814,832c + Decoders.addDecoder(clazz: Model200Response.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Model200Response() : instance as! Model200Response + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["class"] as AnyObject?) { + + case let .success(value): result._class = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Model200Response", actual: "\(source)")) + } +====1 +1:522,525c + // Decoder for [Name] + Decoders.addDecoder(clazz: [Name].self) { (source: AnyObject, instance: AnyObject?) -> [Name] in + return Decoders.decode(clazz: [Name].self, source: source) + } +2:835a +3:835a +====1 +1:527,535c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Name in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Name() : instance as! Name + + result.name = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) + result.snakeCase = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) + result.property = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) + result._123Number = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) + return result +2:837,867c +3:837,867c + Decoders.addDecoder(clazz: Name.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Name() : instance as! Name + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["name"] as AnyObject?) 
{ + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["snake_case"] as AnyObject?) { + + case let .success(value): result.snakeCase = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["property"] as AnyObject?) { + + case let .success(value): result.property = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["123Number"] as AnyObject?) { + + case let .success(value): result._123Number = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Name", actual: "\(source)")) + } +====1 +1:539,542c + // Decoder for [NumberOnly] + Decoders.addDecoder(clazz: [NumberOnly].self) { (source: AnyObject, instance: AnyObject?) -> [NumberOnly] in + return Decoders.decode(clazz: [NumberOnly].self, source: source) + } +2:870a +3:870a +====1 +1:544,549c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> NumberOnly in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + + result.justNumber = Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) + return result +2:872,884c +3:872,884c + Decoders.addDecoder(clazz: NumberOnly.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? NumberOnly() : instance as! NumberOnly + switch Decoders.decodeOptional(clazz: Double.self, source: sourceDictionary["JustNumber"] as AnyObject?) { + + case let .success(value): result.justNumber = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "NumberOnly", actual: "\(source)")) + } +====1 +1:553,556c + // Decoder for [Order] + Decoders.addDecoder(clazz: [Order].self) { (source: AnyObject, instance: AnyObject?) -> [Order] in + return Decoders.decode(clazz: [Order].self, source: source) + } +2:887a +3:887a +====1 +1:558,571c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Order in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Order() : instance as! Order + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.petId = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) + result.quantity = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) + result.shipDate = Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Order.Status(rawValue: (status)) + } + + result.complete = Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) + return result +2:889,931c +3:889,931c + Decoders.addDecoder(clazz: Order.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Order() : instance as! 
Order + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["petId"] as AnyObject?) { + + case let .success(value): result.petId = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["quantity"] as AnyObject?) { + + case let .success(value): result.quantity = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Date.self, source: sourceDictionary["shipDate"] as AnyObject?) { + + case let .success(value): result.shipDate = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Order.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Bool.self, source: sourceDictionary["complete"] as AnyObject?) { + + case let .success(value): result.complete = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Order", actual: "\(source)")) + } +====1 +1:575,578c + // Decoder for [OuterBoolean] + Decoders.addDecoder(clazz: [OuterBoolean].self) { (source: AnyObject) -> [OuterBoolean] in + return Decoders.decode(clazz: [OuterBoolean].self, source: source) + } +2:934a +3:934a +====1 +1:580c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject) -> OuterBoolean in +2:936c +3:936c + Decoders.addDecoder(clazz: OuterBoolean.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:582c + return source +2:938,940c +3:938,940c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterBoolean", actual: "\(source)")) +====1 +1:584c + fatalError("Source \(source) is not convertible to typealias OuterBoolean: Maybe swagger file is insufficient") +2:941a +3:941a +====1 +1:588,591c + // Decoder for [OuterComposite] + Decoders.addDecoder(clazz: [OuterComposite].self) { (source: AnyObject) -> [OuterComposite] in + return Decoders.decode(clazz: [OuterComposite].self, source: source) + } +2:944a +3:944a +====1 +1:593,600c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject) -> OuterComposite in + let sourceDictionary = source as! [AnyHashable: Any] + + let instance = OuterComposite() + instance.myNumber = Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) + instance.myString = Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) + instance.myBoolean = Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) + return instance +2:946,970c +3:946,970c + Decoders.addDecoder(clazz: OuterComposite.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? OuterComposite() : instance as! OuterComposite + switch Decoders.decodeOptional(clazz: OuterNumber.self, source: sourceDictionary["my_number"] as AnyObject?) 
{ + + case let .success(value): result.myNumber = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterString.self, source: sourceDictionary["my_string"] as AnyObject?) { + + case let .success(value): result.myString = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: OuterBoolean.self, source: sourceDictionary["my_boolean"] as AnyObject?) { + + case let .success(value): result.myBoolean = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "OuterComposite", actual: "\(source)")) + } +====1 +1:604,607c + // Decoder for [OuterEnum] + Decoders.addDecoder(clazz: [OuterEnum].self) { (source: AnyObject, instance: AnyObject?) -> [OuterEnum] in + return Decoders.decode(clazz: [OuterEnum].self, source: source) + } +2:973a +3:973a +====1 +1:609,615c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> OuterEnum in + if let source = source as? String { + if let result = OuterEnum(rawValue: source) { + return result + } + } + fatalError("Source \(source) is not convertible to enum type OuterEnum: Maybe swagger file is insufficient") +2:975,977c +3:975,977c + Decoders.addDecoder(clazz: OuterEnum.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + //TODO: I don't think we need this anymore + return Decoders.decode(clazz: OuterEnum.self, source: source, instance: instance) +====1 +1:619,622c + // Decoder for [OuterNumber] + Decoders.addDecoder(clazz: [OuterNumber].self) { (source: AnyObject) -> [OuterNumber] in + return Decoders.decode(clazz: [OuterNumber].self, source: source) + } +2:980a +3:980a +====1 +1:624c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject) -> OuterNumber in +2:982c +3:982c + Decoders.addDecoder(clazz: OuterNumber.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:626c + return source +2:984,986c +3:984,986c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterNumber", actual: "\(source)")) +====1 +1:628c + fatalError("Source \(source) is not convertible to typealias OuterNumber: Maybe swagger file is insufficient") +2:987a +3:987a +====1 +1:632,635c + // Decoder for [OuterString] + Decoders.addDecoder(clazz: [OuterString].self) { (source: AnyObject) -> [OuterString] in + return Decoders.decode(clazz: [OuterString].self, source: source) + } +2:990a +3:990a +====1 +1:637c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject) -> OuterString in +2:992c +3:992c + Decoders.addDecoder(clazz: OuterString.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in +====1 +1:639c + return source +2:994,996c +3:994,996c + return .success(source) + } else { + return .failure(.typeMismatch(expected: "Typealias OuterString", actual: "\(source)")) +====1 +1:641c + fatalError("Source \(source) is not convertible to typealias OuterString: Maybe swagger file is insufficient") +2:997a +3:997a +====1 +1:645,648c + // Decoder for [Pet] + Decoders.addDecoder(clazz: [Pet].self) { (source: AnyObject, instance: AnyObject?) -> [Pet] in + return Decoders.decode(clazz: [Pet].self, source: source) + } +2:1000a +3:1000a +====1 +1:650,663c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Pet in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Pet() : instance as! 
Pet + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.category = Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) + result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + result.photoUrls = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["photoUrls"] as AnyObject?) + result.tags = Decoders.decodeOptional(clazz: Array.self, source: sourceDictionary["tags"] as AnyObject?) + if let status = sourceDictionary["status"] as? String { + result.status = Pet.Status(rawValue: (status)) + } + + return result +2:1002,1044c +3:1002,1044c + Decoders.addDecoder(clazz: Pet.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Pet() : instance as! Pet + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Category.self, source: sourceDictionary["category"] as AnyObject?) { + + case let .success(value): result.category = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [String].self, source: sourceDictionary["photoUrls"] as AnyObject?) { + + case let .success(value): result.photoUrls = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: [Tag].self, source: sourceDictionary["tags"] as AnyObject?) { + + case let .success(value): result.tags = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Pet.Status.self, source: sourceDictionary["status"] as AnyObject?) { + + case let .success(value): result.status = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Pet", actual: "\(source)")) + } +====1 +1:667,670c + // Decoder for [ReadOnlyFirst] + Decoders.addDecoder(clazz: [ReadOnlyFirst].self) { (source: AnyObject, instance: AnyObject?) -> [ReadOnlyFirst] in + return Decoders.decode(clazz: [ReadOnlyFirst].self, source: source) + } +2:1047a +3:1047a +====1 +1:672,678c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> ReadOnlyFirst in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + + result.bar = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) + result.baz = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) + return result +2:1049,1067c +3:1049,1067c + Decoders.addDecoder(clazz: ReadOnlyFirst.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? ReadOnlyFirst() : instance as! ReadOnlyFirst + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["bar"] as AnyObject?) 
{ + + case let .success(value): result.bar = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["baz"] as AnyObject?) { + + case let .success(value): result.baz = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "ReadOnlyFirst", actual: "\(source)")) + } +====1 +1:682,685c + // Decoder for [Return] + Decoders.addDecoder(clazz: [Return].self) { (source: AnyObject, instance: AnyObject?) -> [Return] in + return Decoders.decode(clazz: [Return].self, source: source) + } +2:1070a +3:1070a +====1 +1:687,692c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Return in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Return() : instance as! Return + + result._return = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) + return result +2:1072,1084c +3:1072,1084c + Decoders.addDecoder(clazz: Return.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Return() : instance as! Return + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["return"] as AnyObject?) { + + case let .success(value): result._return = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Return", actual: "\(source)")) + } +====1 +1:696,699c + // Decoder for [SpecialModelName] + Decoders.addDecoder(clazz: [SpecialModelName].self) { (source: AnyObject, instance: AnyObject?) -> [SpecialModelName] in + return Decoders.decode(clazz: [SpecialModelName].self, source: source) + } +2:1087a +3:1087a +====1 +1:701,706c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> SpecialModelName in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + + result.specialPropertyName = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) + return result +2:1089,1101c +3:1089,1101c + Decoders.addDecoder(clazz: SpecialModelName.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? SpecialModelName() : instance as! SpecialModelName + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["$special[property.name]"] as AnyObject?) { + + case let .success(value): result.specialPropertyName = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "SpecialModelName", actual: "\(source)")) + } +====1 +1:710,713c + // Decoder for [Tag] + Decoders.addDecoder(clazz: [Tag].self) { (source: AnyObject, instance: AnyObject?) -> [Tag] in + return Decoders.decode(clazz: [Tag].self, source: source) + } +2:1104a +3:1104a +====1 +1:715,721c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Tag in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? Tag() : instance as! Tag + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) 
+ result.name = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) + return result +2:1106,1124c +3:1106,1124c + Decoders.addDecoder(clazz: Tag.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? Tag() : instance as! Tag + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["name"] as AnyObject?) { + + case let .success(value): result.name = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "Tag", actual: "\(source)")) + } +====1 +1:725,728c + // Decoder for [User] + Decoders.addDecoder(clazz: [User].self) { (source: AnyObject, instance: AnyObject?) -> [User] in + return Decoders.decode(clazz: [User].self, source: source) + } +2:1127a +3:1127a +====1 +1:730,742c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> User in + let sourceDictionary = source as! [AnyHashable: Any] + let result = instance == nil ? User() : instance as! User + + result.id = Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) + result.username = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) + result.firstName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) + result.lastName = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) + result.email = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) + result.password = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) + result.phone = Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) + result.userStatus = Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) + return result +2:1129,1183c +3:1129,1183c + Decoders.addDecoder(clazz: User.self) { (source: AnyObject, instance: AnyObject?) -> Decoded in + if let sourceDictionary = source as? [AnyHashable: Any] { + let result = instance == nil ? User() : instance as! User + switch Decoders.decodeOptional(clazz: Int64.self, source: sourceDictionary["id"] as AnyObject?) { + + case let .success(value): result.id = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["username"] as AnyObject?) { + + case let .success(value): result.username = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["firstName"] as AnyObject?) { + + case let .success(value): result.firstName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["lastName"] as AnyObject?) { + + case let .success(value): result.lastName = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["email"] as AnyObject?) 
{ + + case let .success(value): result.email = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["password"] as AnyObject?) { + + case let .success(value): result.password = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: String.self, source: sourceDictionary["phone"] as AnyObject?) { + + case let .success(value): result.phone = value + case let .failure(error): return .failure(error) + + } + switch Decoders.decodeOptional(clazz: Int32.self, source: sourceDictionary["userStatus"] as AnyObject?) { + + case let .success(value): result.userStatus = value + case let .failure(error): return .failure(error) + + } + return .success(result) + } else { + return .failure(.typeMismatch(expected: "User", actual: "\(source)")) + } +====1 +1:749c + } +\ No newline at end of file +2:1190c +3:1190c + } diff --git a/src/python/merge_conflict_analysis_diffs/1897/spork/diff_PetApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_PetApi.php.txt new file mode 100644 index 0000000000..9eb97a1d7a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_PetApi.php.txt @@ -0,0 +1,1642 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return PetApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->addPetWithHttpInfo($body); + return $response; +2:96c +3:96c + 
$this->addPetWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:143a +2:151,162c +3:151,162c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:145,146c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:164,165c +3:164,165c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:148c + // make the API Call +2:167,188c +3:167,188c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:150,158c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:189a +3:189a +====1 +1:160c + return [null, $statusCode, $httpHeader]; +2:191,213c +3:191,213c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:164c + +2:216a +3:216a +====1 +1:168c + +2:219a +3:219a +====1 +1:176a +2:228c +3:228c + * @throws \InvalidArgumentException +====1 +1:181,182c + list($response) = $this->deletePetWithHttpInfo($pet_id, $api_key); + return $response; +2:233c +3:233c + $this->deletePetWithHttpInfo($pet_id, $api_key); +====1 +1:192a +2:244c +3:244c + * @throws \InvalidArgumentException +====1 +1:201,203c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:253,255c +3:253,255c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:206,211c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:258,260c +3:258,260c + $httpBody = ''; + $multipart = false; + $returnType = ''; +====1 +1:215c + $headerParams['api_key'] = $this->apiClient->getSerializer()->toHeaderValue($api_key); +2:264c +3:264c + $headerParams['api_key'] = ObjectSerializer::toHeaderValue($api_key); +====1 +1:216a +2:266c +3:266c + +====1 +1:219,223c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:269c +3:269c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:225c + +2:271,272c + + +3:271,272c + + +====1 +1:228a +2:276c +3:276c + +====1 +1:230c + $httpBody = $formParams; // for HTTP post (form) +2:278,301c +3:278,301c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:231a +2:303c +3:303c + +====1 +1:233,234c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:305,306c +3:305,306c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:236c + // make the API Call +2:308,329c +3:308,329c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + +====1 +1:238,246c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:330a +3:330a +====1 +1:248c + return [null, $statusCode, $httpHeader]; +2:332,354c +3:332,354c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:252c + +2:357a +3:357a +====1 +1:256c + +2:360a +3:360a +====1 +1:263a +2:368c +3:368c + * @throws \InvalidArgumentException +====1 +1:278a +2:384c +3:384c + * @throws \InvalidArgumentException +====1 +1:287,289c + // parse inputs + $resourcePath = "/pet/findByStatus"; + $httpBody = ''; +2:393,395c +3:393,395c + + $resourcePath = '/pet/findByStatus'; + $formParams = []; +====1 +1:292,297c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:398,400c +3:398,400c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:301c + $status = $this->apiClient->getSerializer()->serializeCollection($status, 'csv', true); +2:404c +3:404c + $status = ObjectSerializer::serializeCollection($status, 'csv', true); +====1 +1:304c + $queryParams['status'] = $this->apiClient->getSerializer()->toQueryValue($status); +2:407c +3:407c + $queryParams['status'] = ObjectSerializer::toQueryValue($status); +==== +1:306c + +2:409,411c + + + +3:409,411c + + + +====1 +1:309a +2:415c +3:415c + +====1 +1:311c + $httpBody = $formParams; // for HTTP post (form) +2:417,440c +3:417,440c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:312a +2:442c +3:442c + +====1 +1:314,315c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:444,445c +3:444,445c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:317c + // make the API Call +2:447,468c +3:447,468c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:319,327c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByStatus' + ); +2:469a +3:469a +====1 +1:329c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:471,507c +3:471,507c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:333c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:511c +3:511c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:337c + +2:514a +3:514a +====1 +1:341c + +2:517a +3:517a +====1 +1:348a +2:525c +3:525c + * @throws \InvalidArgumentException +====1 +1:363a +2:541c +3:541c + * @throws \InvalidArgumentException +====1 +1:372,374c + // parse inputs + $resourcePath = "/pet/findByTags"; + $httpBody = ''; +2:550,552c +3:550,552c + + $resourcePath = '/pet/findByTags'; + $formParams = []; +====1 +1:377,382c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:555,557c +3:555,557c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet[]'; +====1 +1:386c + $tags = $this->apiClient->getSerializer()->serializeCollection($tags, 'csv', true); +2:561c +3:561c + $tags = ObjectSerializer::serializeCollection($tags, 'csv', true); +====1 +1:389c + $queryParams['tags'] = $this->apiClient->getSerializer()->toQueryValue($tags); +2:564c +3:564c + $queryParams['tags'] = ObjectSerializer::toQueryValue($tags); +==== +1:391c + +2:566,568c + + + +3:566,568c + + + +====1 +1:394a +2:572c +3:572c + +====1 +1:396c + $httpBody = $formParams; // for HTTP post (form) +2:574,586c +3:574,586c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => 
$formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:397a +2:588,599c +3:588,599c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + +====1 +1:399,400c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:601,602c +3:601,602c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:402c + // make the API Call +2:604,625c +3:604,625c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:404,412c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet[]', + '/pet/findByTags' + ); +2:626a +3:626a +====1 +1:414c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet[]', $httpHeader), $statusCode, $httpHeader]; +2:628,664c +3:628,664c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:418c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +2:668c +3:668c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet[]', $e->getResponseHeaders()); +====1 +1:422c + +2:671a +3:671a +====1 +1:426c + +2:674a +3:674a +====1 +1:433a +2:682c +3:682c + * @throws \InvalidArgumentException +====1 +1:448a +2:698c +3:698c + * @throws \InvalidArgumentException +====1 +1:457,459c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:707,709c +3:707,709c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:462,467c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:712,715c +3:712,715c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Pet'; + +====1 +1:471,475c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:719c +3:719c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +==== +1:477c + +2:721,722c + + +3:721,722c + + +====1 +1:480a +2:726c +3:726c + +====1 +1:482c + $httpBody = $formParams; // for HTTP post (form) +2:728,740c +3:728,740c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:484,487c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:742,751c +3:742,751c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); +====1 +1:489c + // make the API Call +2:753,780c +3:753,780c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:491,499c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Pet', + '/pet/{petId}' + ); +2:781a +3:781a +====1 +1:501c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Pet', $httpHeader), $statusCode, $httpHeader]; +2:783,819c +3:783,819c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:505c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +2:823c +3:823c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Pet', $e->getResponseHeaders()); +====1 +1:509c + +2:826a +3:826a +====1 +1:513c + +2:829a +3:829a +====1 +1:520a +2:837c +3:837c + * @throws \InvalidArgumentException +====1 +1:525,526c + list($response) = $this->updatePetWithHttpInfo($body); + return $response; +2:842c +3:842c + $this->updatePetWithHttpInfo($body); +====1 +1:535a +2:852c +3:852c + * @throws \InvalidArgumentException +====1 +1:544,546c + // parse inputs + $resourcePath = "/pet"; + $httpBody = ''; +2:861,863c +3:861,863c + + $resourcePath = '/pet'; + $formParams = []; +====1 +1:549,554c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/json', 'application/xml']); +2:866,870c +3:866,870c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:564a +2:881c +3:881c + +====1 +1:566c + $httpBody = $formParams; // for HTTP post (form) +2:883,895c +3:883,895c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:567a +2:897,908c +3:897,908c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/json', 'application/xml'] + ); + } + +====1 +1:569,570c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:910,911c +3:910,911c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:572c + // make the API Call +2:913,934c +3:913,934c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + +====1 +1:574,582c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet' + ); +2:935a +3:935a +====1 +1:584c + return [null, $statusCode, $httpHeader]; +2:937,959c +3:937,959c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:588c + +2:962a +3:962a +====1 +1:592c + +2:965a +3:965a +====1 +1:601a +2:975c +3:975c + * @throws \InvalidArgumentException +====1 +1:606,607c + list($response) = $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); + return $response; +2:980c +3:980c + $this->updatePetWithFormWithHttpInfo($pet_id, $name, $status); +====1 +1:618a +2:992c +3:992c + * @throws \InvalidArgumentException +====1 +1:627,629c + // parse inputs + $resourcePath = "/pet/{petId}"; + $httpBody = ''; +2:1001,1003c +3:1001,1003c + + $resourcePath = '/pet/{petId}'; + $formParams = []; +====1 +1:632,637c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['application/x-www-form-urlencoded']); +2:1006,1009c +3:1006,1009c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:641,645c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1013c +3:1013c + $resourcePath = str_replace('{' . 'petId' . 
'}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:646a +2:1015c +3:1015c + +====1 +1:649c + $formParams['name'] = $this->apiClient->getSerializer()->toFormValue($name); +2:1018c +3:1018c + $formParams['name'] = ObjectSerializer::toFormValue($name); +====1 +1:653c + $formParams['status'] = $this->apiClient->getSerializer()->toFormValue($status); +2:1022c +3:1022c + $formParams['status'] = ObjectSerializer::toFormValue($status); +====3 +1:655c +2:1024c + +3:1024c + +====1 +1:658a +2:1028c +3:1028c + +====1 +1:660c + $httpBody = $formParams; // for HTTP post (form) +2:1030,1053c +3:1030,1053c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } + } + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + ['application/x-www-form-urlencoded'] + ); +====1 +1:661a +2:1055c +3:1055c + +====1 +1:663,664c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1057,1058c +3:1057,1058c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); +====1 +1:666c + // make the API Call +2:1060,1081c +3:1060,1081c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:668,676c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/pet/{petId}' + ); +2:1082a +3:1082a +====1 +1:678c + return [null, $statusCode, $httpHeader]; +2:1084,1106c +3:1084,1106c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; + +====1 +1:682c + +2:1109a +3:1109a +====1 +1:686c + +2:1112a +3:1112a +====1 +1:695a +2:1122c +3:1122c + * @throws \InvalidArgumentException +====1 +1:712a +2:1140c +3:1140c + * @throws \InvalidArgumentException +====1 +1:721,723c + // parse inputs + $resourcePath = "/pet/{petId}/uploadImage"; + $httpBody = ''; +2:1149,1151c +3:1149,1151c + + $resourcePath = '/pet/{petId}/uploadImage'; + $formParams = []; +====1 +1:726,731c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType(['multipart/form-data']); +2:1154,1157c +3:1154,1157c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\ApiResponse'; + +====1 +1:735,739c + $resourcePath = str_replace( + "{" . "petId" . "}", + $this->apiClient->getSerializer()->toPathValue($pet_id), + $resourcePath + ); +2:1161c +3:1161c + $resourcePath = str_replace('{' . 'petId' . '}', ObjectSerializer::toPathValue($pet_id), $resourcePath); +====1 +1:740a +2:1163c +3:1163c + +====1 +1:743c + $formParams['additionalMetadata'] = $this->apiClient->getSerializer()->toFormValue($additional_metadata); +2:1166c +3:1166c + $formParams['additionalMetadata'] = ObjectSerializer::toFormValue($additional_metadata); +====1 +1:747,753c + // PHP 5.5 introduced a CurlFile object that deprecates the old @filename syntax + // See: https://wiki.php.net/rfc/curl-file-upload + if (function_exists('curl_file_create')) { + $formParams['file'] = curl_file_create($this->apiClient->getSerializer()->toFormValue($file)); + } else { + $formParams['file'] = '@' . $this->apiClient->getSerializer()->toFormValue($file); + } +2:1170,1171c +3:1170,1171c + $multipart = true; + $formParams['file'] = \GuzzleHttp\Psr7\try_fopen(ObjectSerializer::toFormValue($file), 'rb'); +====3 +1:755c +2:1173c + +3:1173c + +====1 +1:758a +2:1177c +3:1177c + +====1 +1:760c + $httpBody = $formParams; // for HTTP post (form) +2:1179,1191c +3:1179,1191c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:761a +2:1193,1204c +3:1193,1204c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + ['multipart/form-data'] + ); + } + +====1 +1:763,764c + if (strlen($this->apiClient->getConfig()->getAccessToken()) !== 0) { + $headerParams['Authorization'] = 'Bearer ' . $this->apiClient->getConfig()->getAccessToken(); +2:1206,1207c +3:1206,1207c + if ($this->config->getAccessToken() !== null) { + $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); +====1 +1:766c + // make the API Call +2:1209,1230c +3:1209,1230c + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + +====1 +1:768,776c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\ApiResponse', + '/pet/{petId}/uploadImage' + ); +2:1231a +3:1231a +====1 +1:778c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\ApiResponse', $httpHeader), $statusCode, $httpHeader]; +2:1233,1269c +3:1233,1269c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:782c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +2:1273c +3:1273c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\ApiResponse', $e->getResponseHeaders()); +====1 +1:786c + +2:1276a +3:1276a diff --git a/src/python/merge_conflict_analysis_diffs/1897/spork/diff_PhpClientCodegen.java.txt b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_PhpClientCodegen.java.txt new file mode 100644 index 0000000000..223ce1f225 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_PhpClientCodegen.java.txt @@ -0,0 +1,26 @@ +====3 +1:59c +2:59c + +3:58a +====1 +1:304,305c + supportingFiles.add(new SupportingFile("configuration.mustache", toPackagePath(invokerPackage, srcBasePath), "Configuration.php")); + supportingFiles.add(new SupportingFile("ApiClient.mustache", toPackagePath(invokerPackage, srcBasePath), "ApiClient.php")); +2:303a +3:302a +====1 +1:306a +2:305c +3:304c + supportingFiles.add(new SupportingFile("Configuration.mustache", toPackagePath(invokerPackage, srcBasePath), "Configuration.php")); +====1 +1:307a +2:307c +3:306c + supportingFiles.add(new SupportingFile("HeaderSelector.mustache", toPackagePath(invokerPackage, srcBasePath), "HeaderSelector.php")); +====1 +1:309c + supportingFiles.add(new SupportingFile("autoload.mustache", getPackagePath(), "autoload.php")); +2:308a +3:307a diff --git a/src/python/merge_conflict_analysis_diffs/1897/spork/diff_StoreApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_StoreApi.php.txt new file mode 100644 index 
0000000000..d45a4bcd95 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_StoreApi.php.txt @@ -0,0 +1,825 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; +2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return StoreApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->deleteOrderWithHttpInfo($order_id); + return $response; +2:96c +3:96c + $this->deleteOrderWithHttpInfo($order_id); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,123c +3:120,123c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:134,138c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:127c +3:127c + $resourcePath = str_replace('{' . 'order_id' . 
'}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:140c + +2:129,130c + + +3:129,130c + + +====1 +1:143a +2:134c +3:134c + +====1 +1:145c + $httpBody = $formParams; // for HTTP post (form) +2:136,148c +3:136,148c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:147,156c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/store/order/{order_id}' +2:150,158c +3:150,158c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:157a +2:160,207c +3:160,207c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:159c + return [null, $statusCode, $httpHeader]; +2:208a +3:208a +====1 +1:163c + +2:211a +3:211a +====1 +1:167c + +2:214a +3:214a +====1 +1:173a +2:221c +3:221c + * @throws \InvalidArgumentException +====1 +1:187a +2:236c +3:236c + * @throws \InvalidArgumentException +====1 +1:192,194c + // parse inputs + $resourcePath = "/store/inventory"; + $httpBody = ''; +2:241,243c +3:241,243c + + $resourcePath = '/store/inventory'; + $formParams = []; +==== +1:197,204c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); + + +2:246,252c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + + +3:246,252c + $httpBody = ''; + $multipart = false; + $returnType = 'map[string,int]'; + + + + +====1 +1:207a +2:256c +3:256c + +====1 +1:209c + $httpBody = $formParams; // for HTTP post (form) +2:258,270c +3:258,270c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:211,214c + // this endpoint requires API key authentication + $apiKey = $this->apiClient->getApiKeyWithPrefix('api_key'); + if (strlen($apiKey) !== 0) { + $headerParams['api_key'] = $apiKey; +2:272,281c +3:272,281c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/json'], + [] + ); +====1 +1:216c + // make the API Call +2:283,310c +3:283,310c + + // this endpoint requires API key authentication + $apiKey = $this->config->getApiKeyWithPrefix('api_key'); + if ($apiKey !== null) { + $headers['api_key'] = $apiKey; + } + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + +====1 +1:218,226c + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'map[string,int]', + '/store/inventory' + ); +2:311a +3:311a +====1 +1:228c + return [$this->apiClient->getSerializer()->deserialize($response, 'map[string,int]', $httpHeader), $statusCode, $httpHeader]; +2:313,349c +3:313,349c + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; + +====1 +1:232c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +2:353c +3:353c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'map[string,int]', $e->getResponseHeaders()); +====1 +1:236c + +2:356a +3:356a +====1 +1:240c + +2:359a +3:359a +====1 +1:247a +2:367c +3:367c + * @throws \InvalidArgumentException +====1 +1:262a +2:383c +3:383c + * @throws \InvalidArgumentException +====1 +1:271c + if (($order_id > 5)) { +2:392c +3:392c + if ($order_id > 5) { +====1 +1:274c + if (($order_id < 1)) { +2:395c +3:395c + if ($order_id < 1) { +====1 +1:278,280c + // parse inputs + $resourcePath = "/store/order/{order_id}"; + $httpBody = ''; +2:399,401c +3:399,401c + + $resourcePath = '/store/order/{order_id}'; + $formParams = []; +====1 +1:283,288c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:404,407c +3:404,407c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + +====1 +1:292,296c + $resourcePath = str_replace( + "{" . "order_id" . "}", + $this->apiClient->getSerializer()->toPathValue($order_id), + $resourcePath + ); +2:411c +3:411c + $resourcePath = str_replace('{' . 'order_id' . '}', ObjectSerializer::toPathValue($order_id), $resourcePath); +==== +1:298c + +2:413,414c + + +3:413,414c + + +====1 +1:301a +2:418c +3:418c + +====1 +1:303c + $httpBody = $formParams; // for HTTP post (form) +2:420,432c +3:420,432c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:305,314c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order/{order_id}' +2:434,442c +3:434,442c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:315a +2:444,505c +3:444,505c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:317c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:506a +3:506a +====1 +1:321c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:510c +3:510c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:325c + +2:513a +3:513a +====1 +1:329c + +2:516a +3:516a +====1 +1:336a +2:524c +3:524c + * @throws \InvalidArgumentException +====1 +1:351a +2:540c +3:540c + * @throws \InvalidArgumentException +====1 +1:360,362c + // parse inputs + $resourcePath = "/store/order"; + $httpBody = ''; +2:549,551c +3:549,551c + + $resourcePath = '/store/order'; + $formParams = []; +====1 +1:365,370c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:554,558c +3:554,558c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\Order'; + + +====1 +1:380a +2:569c +3:569c + +====1 +1:382c + $httpBody = $formParams; // for HTTP post (form) +2:571,583c +3:571,583c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:384,393c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\Order', + '/store/order' +2:585,588c +3:585,588c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:394a +2:590,656c +3:590,656c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + 
+ $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:396c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\Order', $httpHeader), $statusCode, $httpHeader]; +2:657a +3:657a +====1 +1:400c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +2:661c +3:661c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\Order', $e->getResponseHeaders()); +====1 +1:404c + +2:664a +3:664a diff --git a/src/python/merge_conflict_analysis_diffs/1897/spork/diff_UserApi.php.txt b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_UserApi.php.txt new file mode 100644 index 0000000000..f70f624185 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_UserApi.php.txt @@ -0,0 +1,1443 @@ +====1 +1:31,34c + use \Swagger\Client\ApiClient; + use \Swagger\Client\ApiException; + use \Swagger\Client\Configuration; + use \Swagger\Client\ObjectSerializer; +2:31,39c +3:31,39c + use GuzzleHttp\Client; + use GuzzleHttp\ClientInterface; + use GuzzleHttp\Exception\RequestException; + use GuzzleHttp\Psr7\MultipartStream; + use GuzzleHttp\Psr7\Request; + use Swagger\Client\ApiException; + use Swagger\Client\Configuration; + use Swagger\Client\HeaderSelector; + use Swagger\Client\ObjectSerializer; +====1 +1:47,49c + * API Client + * + * @var \Swagger\Client\ApiClient instance of the ApiClient +2:52c +3:52c + * @var ClientInterface +====1 +1:51c + protected $apiClient; +2:54c +3:54c + protected $client; +====1 +1:54,56c + * Constructor + * + * @param \Swagger\Client\ApiClient|null $apiClient The api client to use +2:57c +3:57c + * @var Configuration +====1 +1:58,65c + public function __construct(\Swagger\Client\ApiClient $apiClient = null) + { + if ($apiClient === null) { + $apiClient = new ApiClient(); + } + + $this->apiClient = $apiClient; + } +2:59c +3:59c + protected $config; +====1 +1:68,70c + * Get API client + * + * @return \Swagger\Client\ApiClient get the API client +2:62,64c +3:62,64c + * @param ClientInterface $client + * @param Configuration $config + * @param HeaderSelector $selector +====1 +1:72,74c + public function getApiClient() + { + return $this->apiClient; 
+2:66,73c +3:66,73c + public function __construct( + ClientInterface $client = null, + Configuration $config = null, + HeaderSelector $selector = null + ) { + $this->client = $client ?: new Client(); + $this->config = $config ?: new Configuration(); + $this->headerSelector = $selector ?: new HeaderSelector(); +====1 +1:78,82c + * Set the API client + * + * @param \Swagger\Client\ApiClient $apiClient set the API client + * + * @return UserApi +2:77c +3:77c + * @return Configuration +====1 +1:84c + public function setApiClient(\Swagger\Client\ApiClient $apiClient) +2:79c +3:79c + public function getConfig() +====1 +1:86,87c + $this->apiClient = $apiClient; + return $this; +2:81c +3:81c + return $this->config; +====1 +1:96a +2:91c +3:91c + * @throws \InvalidArgumentException +====1 +1:101,102c + list($response) = $this->createUserWithHttpInfo($body); + return $response; +2:96c +3:96c + $this->createUserWithHttpInfo($body); +====1 +1:111a +2:106c +3:106c + * @throws \InvalidArgumentException +====1 +1:120,122c + // parse inputs + $resourcePath = "/user"; + $httpBody = ''; +2:115,117c +3:115,117c + + $resourcePath = '/user'; + $formParams = []; +====1 +1:125,130c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:120,124c +3:120,124c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:140a +2:135c +3:135c + +====1 +1:142c + $httpBody = $formParams; // for HTTP post (form) +2:137,149c +3:137,149c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:144,153c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user' +2:151,154c +3:151,154c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:154a +2:156,208c +3:156,208c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:156c + return [null, $statusCode, $httpHeader]; +2:209a +3:209a +====1 +1:160c + +2:212a +3:212a +====1 +1:164c + +2:215a +3:215a +====1 +1:171a +2:223c +3:223c + * @throws \InvalidArgumentException +====1 +1:176,177c + list($response) = $this->createUsersWithArrayInputWithHttpInfo($body); + return $response; +2:228c +3:228c + $this->createUsersWithArrayInputWithHttpInfo($body); +====1 +1:186a +2:238c +3:238c + * @throws \InvalidArgumentException +====1 +1:195,197c + // parse inputs + $resourcePath = "/user/createWithArray"; + $httpBody = ''; +2:247,249c +3:247,249c + + $resourcePath = '/user/createWithArray'; + $formParams = []; +====1 +1:200,205c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:252,256c +3:252,256c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:215a +2:267c +3:267c + +====1 +1:217c + $httpBody = $formParams; // for HTTP post (form) +2:269,281c +3:269,281c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:219,228c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithArray' +2:283,291c +3:283,291c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:229a +2:293,340c +3:293,340c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:231c + return [null, $statusCode, $httpHeader]; +2:341a +3:341a +====1 +1:235c + +2:344a +3:344a +====1 +1:239c + +2:347a +3:347a +====1 +1:246a +2:355c +3:355c + * @throws \InvalidArgumentException +====1 +1:251,252c + list($response) = $this->createUsersWithListInputWithHttpInfo($body); + return $response; +2:360c +3:360c + $this->createUsersWithListInputWithHttpInfo($body); +====1 +1:261a +2:370c +3:370c + * @throws \InvalidArgumentException +====1 +1:270,272c + // parse inputs + $resourcePath = "/user/createWithList"; + $httpBody = ''; +2:379,381c +3:379,381c + + $resourcePath = '/user/createWithList'; + $formParams = []; +====1 +1:275,280c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:384,388c +3:384,388c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + +====1 +1:290a +2:399c +3:399c + +====1 +1:292c + $httpBody = $formParams; // for HTTP post (form) +2:401,413c +3:401,413c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:294,303c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'POST', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/createWithList' +2:415,423c +3:415,423c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:304a +2:425,472c +3:425,472c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'POST', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:306c + return [null, $statusCode, $httpHeader]; +2:473a +3:473a +====1 +1:310c + +2:476a +3:476a +====1 +1:314c + +2:479a +3:479a +====1 +1:321a +2:487c +3:487c + * @throws \InvalidArgumentException +====1 +1:326,327c + list($response) = $this->deleteUserWithHttpInfo($username); + return $response; +2:492c +3:492c + $this->deleteUserWithHttpInfo($username); +====1 +1:336a +2:502c +3:502c + * @throws \InvalidArgumentException +====1 +1:345,347c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:511,513c +3:511,513c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:350,355c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:516,519c +3:516,519c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:359,363c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:523c +3:523c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:365c + +2:525,526c + + +3:525,526c + + +====1 +1:368a +2:530c +3:530c + +====1 +1:370c + $httpBody = $formParams; // for HTTP post (form) +2:532,544c +3:532,544c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:372,381c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'DELETE', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:546,554c +3:546,554c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:382a +2:556,603c +3:556,603c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'DELETE', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:384c + return [null, $statusCode, $httpHeader]; +2:604a +3:604a +====1 +1:388c + +2:607a +3:607a +====1 +1:392c + +2:610a +3:610a +====1 +1:399a +2:618c +3:618c + * @throws \InvalidArgumentException +====1 +1:414a +2:634c +3:634c + * @throws \InvalidArgumentException +====1 +1:423,425c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:643,645c +3:643,645c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:428,433c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:648,651c +3:648,651c + $httpBody = ''; + $multipart = false; + $returnType = '\Swagger\Client\Model\User'; + +====1 +1:437,441c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:655c +3:655c + $resourcePath = str_replace('{' . 'username' . '}', ObjectSerializer::toPathValue($username), $resourcePath); +==== +1:443c + +2:657,658c + + +3:657,658c + + +====1 +1:446a +2:662c +3:662c + +====1 +1:448c + $httpBody = $formParams; // for HTTP post (form) +2:664,676c +3:664,676c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:450,459c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + '\Swagger\Client\Model\User', + '/user/{username}' +2:678,681c +3:678,681c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:460a +2:683,749c +3:683,749c + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:462c + return [$this->apiClient->getSerializer()->deserialize($response, '\Swagger\Client\Model\User', $httpHeader), $statusCode, $httpHeader]; +2:750a +3:750a +====1 +1:466c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +2:754c +3:754c + $data = ObjectSerializer::deserialize($e->getResponseBody(), '\Swagger\Client\Model\User', $e->getResponseHeaders()); +====1 +1:470c + +2:757a +3:757a +====1 +1:474c + +2:760a +3:760a +====1 +1:482a +2:769c +3:769c + * @throws \InvalidArgumentException +====1 +1:498a +2:786c +3:786c + * @throws \InvalidArgumentException +====1 +1:511,513c + // parse inputs + $resourcePath = "/user/login"; + $httpBody = ''; +2:799,801c +3:799,801c + + $resourcePath = '/user/login'; + $formParams = []; +====1 +1:516,521c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:804,806c +3:804,806c + $httpBody = ''; + $multipart = false; + $returnType = 'string'; +====1 +1:525c + $queryParams['username'] = $this->apiClient->getSerializer()->toQueryValue($username); +2:810c +3:810c + $queryParams['username'] = ObjectSerializer::toQueryValue($username); +====1 +1:529c + $queryParams['password'] = $this->apiClient->getSerializer()->toQueryValue($password); +2:814c +3:814c + $queryParams['password'] = ObjectSerializer::toQueryValue($password); +==== +1:531c + +2:816,818c + + + +3:816,818c + + + +====1 +1:534a +2:822c +3:822c + +====1 +1:536c + $httpBody = $formParams; // for HTTP post (form) +2:824,836c +3:824,836c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:538,547c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + 'string', + '/user/login' +2:838,846c +3:838,846c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:548a +2:848,909c +3:848,909c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . 
$query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + $responseBody = $response->getBody(); + if ($returnType === '\SplFileObject') { + $content = $responseBody; //stream goes to serializer + } else { + $content = $responseBody->getContents(); + if ($returnType !== 'string') { + $content = json_decode($content); + } + } + + return [ + ObjectSerializer::deserialize($content, $returnType, []), + $response->getStatusCode(), + $response->getHeaders() + ]; +====1 +1:550c + return [$this->apiClient->getSerializer()->deserialize($response, 'string', $httpHeader), $statusCode, $httpHeader]; +2:910a +3:910a +====1 +1:554c + $data = $this->apiClient->getSerializer()->deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +2:914c +3:914c + $data = ObjectSerializer::deserialize($e->getResponseBody(), 'string', $e->getResponseHeaders()); +====1 +1:558c + +2:917a +3:917a +====1 +1:562c + +2:920a +3:920a +====1 +1:568a +2:927c +3:927c + * @throws \InvalidArgumentException +====1 +1:573,574c + list($response) = $this->logoutUserWithHttpInfo(); + return $response; +2:932c +3:932c + $this->logoutUserWithHttpInfo(); +====1 +1:582a +2:941c +3:941c + * @throws \InvalidArgumentException +====1 +1:587,589c + // parse inputs + $resourcePath = "/user/logout"; + $httpBody = ''; +2:946,948c +3:946,948c + + $resourcePath = '/user/logout'; + $formParams = []; +==== +1:592,599c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); + + +2:951,957c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + + +3:951,957c + $httpBody = ''; + $multipart = false; + $returnType = ''; + + + + +====1 +1:602a +2:961c +3:961c + +====1 +1:604c + $httpBody = $formParams; // for HTTP post (form) +2:963,975c +3:963,975c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:606,615c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'GET', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/logout' +2:977,980c +3:977,980c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] +====1 +1:616a +2:982,1034c +3:982,1034c + } else { + 
$headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] + ); + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'GET', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? $e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:618c + return [null, $statusCode, $httpHeader]; +2:1035a +3:1035a +====1 +1:622c + +2:1038a +3:1038a +====1 +1:626c + +2:1041a +3:1041a +====1 +1:634a +2:1050c +3:1050c + * @throws \InvalidArgumentException +====1 +1:639,640c + list($response) = $this->updateUserWithHttpInfo($username, $body); + return $response; +2:1055c +3:1055c + $this->updateUserWithHttpInfo($username, $body); +====1 +1:650a +2:1066c +3:1066c + * @throws \InvalidArgumentException +====1 +1:663,665c + // parse inputs + $resourcePath = "/user/{username}"; + $httpBody = ''; +2:1079,1081c +3:1079,1081c + + $resourcePath = '/user/{username}'; + $formParams = []; +====1 +1:668,673c + $formParams = []; + $_header_accept = $this->apiClient->selectHeaderAccept(['application/xml', 'application/json']); + if (!is_null($_header_accept)) { + $headerParams['Accept'] = $_header_accept; + } + $headerParams['Content-Type'] = $this->apiClient->selectHeaderContentType([]); +2:1084,1087c +3:1084,1087c + $httpBody = ''; + $multipart = false; + $returnType = ''; + +====1 +1:677,681c + $resourcePath = str_replace( + "{" . "username" . "}", + $this->apiClient->getSerializer()->toPathValue($username), + $resourcePath + ); +2:1091c +3:1091c + $resourcePath = str_replace('{' . 'username' . 
'}', ObjectSerializer::toPathValue($username), $resourcePath); +====1 +1:682a +2:1093c +3:1093c + +====1 +1:691a +2:1103c +3:1103c + +====1 +1:693c + $httpBody = $formParams; // for HTTP post (form) +2:1105,1117c +3:1105,1117c + if ($multipart) { + $multipartContents = []; + foreach ($formParams as $formParamName => $formParamValue) { + $multipartContents[] = [ + 'name' => $formParamName, + 'contents' => $formParamValue + ]; + } + $httpBody = new MultipartStream($multipartContents); // for HTTP post (form) + + } else { + $httpBody = \GuzzleHttp\Psr7\build_query($formParams); // for HTTP post (form) + } +====1 +1:695,704c + // make the API Call + try { + list($response, $statusCode, $httpHeader) = $this->apiClient->callApi( + $resourcePath, + 'PUT', + $queryParams, + $httpBody, + $headerParams, + null, + '/user/{username}' +2:1119,1127c +3:1119,1127c + + if ($httpBody instanceof MultipartStream) { + $headers= $this->headerSelector->selectHeadersForMultipart( + ['application/xml', 'application/json'] + ); + } else { + $headers = $this->headerSelector->selectHeaders( + ['application/xml', 'application/json'], + [] +====1 +1:705a +2:1129,1176c +3:1129,1176c + } + + + $query = \GuzzleHttp\Psr7\build_query($queryParams); + $url = $this->config->getHost() . $resourcePath . ($query ? '?' . $query : ''); + + $defaultHeaders = []; + if ($this->config->getUserAgent()) { + $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); + } + + $headers = array_merge( + $defaultHeaders, + $headerParams, + $headers + ); + + $request = new Request( + 'PUT', + $url, + $headers, + $httpBody + ); + + try { + + try { + $response = $this->client->send($request); + } catch (RequestException $e) { + throw new ApiException( + "[{$e->getCode()}] {$e->getMessage()}", + $e->getCode(), + $e->getResponse() ? 
$e->getResponse()->getHeaders() : null + ); + } + + $statusCode = $response->getStatusCode(); + + if ($statusCode < 200 || $statusCode > 299) { + throw new ApiException( + "[$statusCode] Error connecting to the API ($url)", + $statusCode, + $response->getHeaders(), + $response->getBody() + ); + } + + return [null, $statusCode, $response->getHeaders()]; +====1 +1:707c + return [null, $statusCode, $httpHeader]; +2:1177a +3:1177a +====1 +1:711c + +2:1180a +3:1180a diff --git a/src/python/merge_conflict_analysis_diffs/1897/spork/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_VERSION.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/1897/spork/diff_io.swagger.codegen.CodegenConfig.txt b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_io.swagger.codegen.CodegenConfig.txt new file mode 100644 index 0000000000..909e86e0be --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1897/spork/diff_io.swagger.codegen.CodegenConfig.txt @@ -0,0 +1,13 @@ +==== +1:2c + io.swagger.codegen.languages.AspNet5ServerCodegen +2:1a +3:2c + io.swagger.codegen.languages.ApexClientCodegen +====3 +1:67c +2:66c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen +\ No newline at end of file +3:67c + io.swagger.codegen.languages.ZendExpressivePathHandlerServerCodegen diff --git a/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_Category.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_Category.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_ModelApiResponse.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_Tag.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_Tag.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_User.java.txt new file mode 100644 index 0000000000..d6db7f4f4d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/git_hires_merge/diff_User.java.txt @@ -0,0 +1,15 @@ +====1 +1:6,9c + + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; +====2 +1:10a +3:8a +2:9c + diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_Category.java.txt new file 
mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_Category.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_ModelApiResponse.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_Tag.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_Tag.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_User.java.txt new file mode 100644 index 0000000000..b7c108941f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort/diff_User.java.txt @@ -0,0 +1,23 @@ +==== +1:6,9c + + + + +2:6,18c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH + +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_Category.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_Category.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_ModelApiResponse.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git 
a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_Tag.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_Tag.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_User.java.txt new file mode 100644 index 0000000000..d6db7f4f4d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_adjacent/diff_User.java.txt @@ -0,0 +1,15 @@ +====1 +1:6,9c + + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; +====2 +1:10a +3:8a +2:9c + diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_Category.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_Category.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_ModelApiResponse.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_Tag.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_Tag.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_User.java.txt new file mode 100644 index 0000000000..b7c108941f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_ignorespace/diff_User.java.txt @@ -0,0 +1,23 @@ +==== +1:6,9c + + + + +2:6,18c + import 
com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH + +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_Category.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_Category.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_ModelApiResponse.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_Tag.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_Tag.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_User.java.txt new file mode 100644 index 0000000000..d6db7f4f4d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports/diff_User.java.txt @@ -0,0 +1,15 @@ +====1 +1:6,9c + + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; +====2 +1:10a +3:8a +2:9c + diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_Category.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_Category.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_ModelApiResponse.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_Tag.java.txt 
b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_Tag.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_Tag.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_User.java.txt new file mode 100644 index 0000000000..d6db7f4f4d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_ort_imports_ignorespace/diff_User.java.txt @@ -0,0 +1,15 @@ +====1 +1:6,9c + + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; +====2 +1:10a +3:8a +2:9c + diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_Category.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_Category.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_ModelApiResponse.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_Tag.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_Tag.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_User.java.txt new file mode 100644 index 0000000000..b7c108941f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_histogram/diff_User.java.txt @@ -0,0 +1,23 @@ +==== +1:6,9c + + + + +2:6,18c + import 
com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH + +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_Category.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_Category.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_ModelApiResponse.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_Tag.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_Tag.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_User.java.txt new file mode 100644 index 0000000000..b7c108941f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_ignorespace/diff_User.java.txt @@ -0,0 +1,23 @@ +==== +1:6,9c + + + + +2:6,18c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH + +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_Category.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_Category.java.txt @@ -0,0 +1,21 @@ +==== 
+1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_ModelApiResponse.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_Tag.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_Tag.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_User.java.txt new file mode 100644 index 0000000000..b7c108941f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_minimal/diff_User.java.txt @@ -0,0 +1,23 @@ +==== +1:6,9c + + + + +2:6,18c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH + +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_Category.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_Category.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_ModelApiResponse.java.txt @@ -0,0 +1,21 @@ +==== 
+1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_Tag.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_Tag.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_User.java.txt new file mode 100644 index 0000000000..b7c108941f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_myers/diff_User.java.txt @@ -0,0 +1,23 @@ +==== +1:6,9c + + + + +2:6,18c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH + +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_Category.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_Category.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_ModelApiResponse.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_Tag.java.txt new file mode 100644 index 0000000000..448261e970 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_Tag.java.txt @@ -0,0 +1,21 @@ +==== +1:6,8c + + + +2:6,17c + import 
com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_User.java.txt new file mode 100644 index 0000000000..b7c108941f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/gitmerge_recursive_patience/diff_User.java.txt @@ -0,0 +1,23 @@ +==== +1:6,9c + + + + +2:6,18c + import com.fasterxml.jackson.annotation.JsonCreator; + <<<<<<< HEAD + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + + + + >>>>>>> TEMP_RIGHT_BRANCH + +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_Category.java.txt new file mode 100644 index 0000000000..7075489f57 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_Category.java.txt @@ -0,0 +1,22 @@ +==== +1:6,8c + + + +2:6,18c + <<<<<<< HEAD + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + import com.fasterxml.jackson.annotation.JsonCreator; + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..7075489f57 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_ModelApiResponse.java.txt @@ -0,0 +1,22 @@ +==== +1:6,8c + + + +2:6,18c + <<<<<<< HEAD + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + import com.fasterxml.jackson.annotation.JsonCreator; + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_Tag.java.txt new file mode 100644 index 0000000000..7075489f57 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_Tag.java.txt @@ -0,0 +1,22 @@ +==== +1:6,8c + + + +2:6,18c + <<<<<<< HEAD + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + import com.fasterxml.jackson.annotation.JsonCreator; + + + + >>>>>>> TEMP_RIGHT_BRANCH +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_User.java.txt new file mode 100644 index 0000000000..181b4fc9c0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/intellimerge/diff_User.java.txt @@ -0,0 +1,24 @@ +==== +1:6,9c + + + + +2:6,19c + <<<<<<< HEAD + import 
com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; + ||||||| d21613c6ab + + + + ======= + import com.fasterxml.jackson.annotation.JsonCreator; + + + + >>>>>>> TEMP_RIGHT_BRANCH + +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Category.java.txt b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Category.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Category.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/spork/diff_JavaClientCodegen.java.txt b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_JavaClientCodegen.java.txt new file mode 100644 index 0000000000..e6a5c3b40b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_JavaClientCodegen.java.txt @@ -0,0 +1,15 @@ +====1 +1:136a +2:137,139c +3:137,139c + if ("retrofit2".equals(getLibrary())) { + supportingFiles.add(new SupportingFile("JSON.mustache", invokerFolder, "JSON.java")); + } +====3 +1:144a +2:147a +3:148,151c + + if (additionalProperties.containsKey("jackson") ) { + supportingFiles.add(new SupportingFile("RFC3339DateFormat.mustache", invokerFolder, "RFC3339DateFormat.java")); + } diff --git a/src/python/merge_conflict_analysis_diffs/1928/spork/diff_ModelApiResponse.java.txt b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_ModelApiResponse.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_ModelApiResponse.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/spork/diff_ObjcClientCodegen.java.txt b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_ObjcClientCodegen.java.txt new file mode 100644 index 0000000000..4fccbd8bde --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_ObjcClientCodegen.java.txt @@ -0,0 +1,29 @@ +====1 +1:258c + supportingFiles.add(new SupportingFile("ApiClient-header.mustache", coreFileFolder(), classPrefix + "ApiClient.h")); +2:258c +3:258c + supportingFiles.add(new SupportingFile("ApiClient-header.mustache", coreFileFolder(), classPrefix + "ApiClient.h")); +====1 +1:260,261c + supportingFiles.add(new SupportingFile("JSONResponseSerializer-header.mustache", coreFileFolder(), classPrefix + "JSONResponseSerializer.h")); + supportingFiles.add(new SupportingFile("JSONResponseSerializer-body.mustache", coreFileFolder(), classPrefix + "JSONResponseSerializer.m")); +2:259a +3:259a +====1 +1:272,273c + supportingFiles.add(new SupportingFile("Configuration-body.mustache", coreFileFolder(), classPrefix + "Configuration.m")); + supportingFiles.add(new SupportingFile("Configuration-header.mustache", coreFileFolder(), classPrefix + "Configuration.h")); +2:270,274c +3:270,274c + supportingFiles.add(new SupportingFile("Configuration-protocol.mustache", coreFileFolder(), classPrefix + "Configuration.h")); + supportingFiles.add(new SupportingFile("DefaultConfiguration-body.mustache", coreFileFolder(), classPrefix + "DefaultConfiguration.m")); + supportingFiles.add(new 
SupportingFile("DefaultConfiguration-header.mustache", coreFileFolder(), classPrefix + "DefaultConfiguration.h")); + supportingFiles.add(new SupportingFile("BasicAuthTokenProvider-header.mustache", coreFileFolder(), classPrefix + "BasicAuthTokenProvider.h")); + supportingFiles.add(new SupportingFile("BasicAuthTokenProvider-body.mustache", coreFileFolder(), classPrefix + "BasicAuthTokenProvider.m")); +====3 +1:362c +2:363c + return getSwaggerType(p) + "*"; +3:363c + return getSwaggerType(p) + "<" + innerTypeDeclaration + ">*"; diff --git a/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Order.java.txt b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Order.java.txt new file mode 100644 index 0000000000..9a58a79f51 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Order.java.txt @@ -0,0 +1,17 @@ +====1 +1:5a +2:6c +3:6c + import com.fasterxml.jackson.annotation.JsonCreator; +====1 +1:6a +2:8c +3:8c + import io.swagger.annotations.ApiModel; +====1 +1:10,12c + + + +2:11a +3:11a diff --git a/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Pet.java.txt b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Pet.java.txt new file mode 100644 index 0000000000..4436481ebc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Pet.java.txt @@ -0,0 +1,17 @@ +====1 +1:5a +2:6c +3:6c + import com.fasterxml.jackson.annotation.JsonCreator; +====1 +1:6a +2:8c +3:8c + import io.swagger.annotations.ApiModel; +====1 +1:12,14c + + + +2:13a +3:13a diff --git a/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Tag.java.txt b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Tag.java.txt new file mode 100644 index 0000000000..00bebb6026 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_Tag.java.txt @@ -0,0 +1,9 @@ +====1 +1:6,8c + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; diff --git a/src/python/merge_conflict_analysis_diffs/1928/spork/diff_User.java.txt b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_User.java.txt new file mode 100644 index 0000000000..d6db7f4f4d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/1928/spork/diff_User.java.txt @@ -0,0 +1,15 @@ +====1 +1:6,9c + + + + +2:6,7c +3:6,7c + import com.fasterxml.jackson.annotation.JsonCreator; + import io.swagger.annotations.ApiModel; +====2 +1:10a +3:8a +2:9c + diff --git a/src/python/merge_conflict_analysis_diffs/354/git_hires_merge/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/git_hires_merge/diff_BlockListener.java.txt new file mode 100644 index 0000000000..259f22b174 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/git_hires_merge/diff_BlockListener.java.txt @@ -0,0 +1,60 @@ +====3 +1:3a +2:3a +3:4c + import org.bukkit.plugin.AuthorNagException; +====3 +1:34a +2:34a +3:36c + * @throws BukkitAuthorNagException +====3 +1:36a +2:36a +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====3 +1:38a +2:38a +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:104,111c + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + 
* + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort/diff_BlockListener.java.txt new file mode 100644 index 0000000000..aa758449c2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort/diff_BlockListener.java.txt @@ -0,0 +1,77 @@ +====1 +1:3a +2:4c +3:4c + import org.bukkit.plugin.AuthorNagException; +====1 +1:34a +2:36c +3:36c + * @throws BukkitAuthorNagException +====1 +1:36a +2:39,40c +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====1 +1:38a +2:43,45c +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:111,135c + + /** + <<<<<<< HEAD + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + ||||||| 325fbdc0 + ======= + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + >>>>>>> TEMP_RIGHT_BRANCH + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_adjacent/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_adjacent/diff_BlockListener.java.txt new file mode 100644 index 0000000000..259f22b174 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_adjacent/diff_BlockListener.java.txt @@ -0,0 +1,60 @@ +====3 +1:3a +2:3a +3:4c + import org.bukkit.plugin.AuthorNagException; +====3 +1:34a +2:34a +3:36c + * @throws BukkitAuthorNagException +====3 +1:36a +2:36a +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====3 +1:38a +2:38a +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:104,111c + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public 
void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_ignorespace/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_ignorespace/diff_BlockListener.java.txt new file mode 100644 index 0000000000..aa758449c2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_ignorespace/diff_BlockListener.java.txt @@ -0,0 +1,77 @@ +====1 +1:3a +2:4c +3:4c + import org.bukkit.plugin.AuthorNagException; +====1 +1:34a +2:36c +3:36c + * @throws BukkitAuthorNagException +====1 +1:36a +2:39,40c +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====1 +1:38a +2:43,45c +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:111,135c + + /** + <<<<<<< HEAD + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + ||||||| 325fbdc0 + ======= + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + >>>>>>> TEMP_RIGHT_BRANCH + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_ignorespace/diff_Event.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_ignorespace/diff_Event.java.txt new file mode 100644 index 0000000000..f010a669f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_ignorespace/diff_Event.java.txt @@ -0,0 +1,243 @@ +====1 +1:95a +2:96,100c +3:96,100c + * Represents Entity-based events + */ + ENTITY, + + /** +====1 +1:106c + * Represents Vehicle-based events +2:111,116c +3:111,116c + * Represents Weather-based events + */ + WEATHER, + + /** + * Vehicle-based events +====1 +1:155a +2:166,172c +3:166,172c + * Called when a player has just been authenticated + * + * @see org.bukkit.event.player.PlayerPreLoginEvent + */ + PLAYER_PRELOGIN (Category.PLAYER), + + /** +====1 +1:218a +2:236,242c +3:236,242c + * Called when a player right clicks an entity + * + * @see org.bukkit.event.player.PlayerInteractEntityEvent + */ + PLAYER_INTERACT_ENTITY (Category.PLAYER), + + /** +====3 +1:255c +2:279c + * +3:279c + * +====3 +1:262c +2:286c + * +3:286c + * +====1 +1:267a +2:292,312c +3:292,312c + * Called when a player interacts with the 
inventory + * + * @see org.bukkit.event.player.PlayerInventoryEvent + */ + PLAYER_INVENTORY(Category.PLAYER), + + /** + * Called when a player enter a bed + * + * @see org.bukkit.event.player.PlayerBedEnterEvent + */ + PLAYER_BED_ENTER(Category.PLAYER), + + /** + * Called when a player leaves a bed + * + * @see org.bukkit.event.player.PlayerBedEnterEvent + */ + PLAYER_BED_LEAVE(Category.PLAYER), + + /** +====1 +1:321a +2:367,373c +3:367,373c + * Called when a block dispenses something + * + * @see org.bukkit.event.block.BlockPlaceEvent + */ + BLOCK_DISPENSE (Category.BLOCK), + + /** +==== +1:356a +2:409,424c + + /** + <<<<<<< HEAD + * Called when a block is successfully smelted (fully) in a furnace. + * + * @see org.bukkit.event.block.FurnaceSmeltEvent + */ + FURNACE_SMELT (Category.BLOCK), + ||||||| 325fbdc0 + ======= + * Called when world attempts to place a snow block during a snowfall + * + * @see org.bukkit.event.block.SnowFormEvent + */ + SNOW_FORM (Category.BLOCK), + >>>>>>> TEMP_RIGHT_BRANCH +3:409,422c + + /** + * Called when a block is successfully smelted (fully) in a furnace. + * + * @see org.bukkit.event.block.FurnaceSmeltEvent + */ + FURNACE_SMELT (Category.BLOCK), + + /** + * Called when world attempts to place a snow block during a snowfall + * + * @see org.bukkit.event.block.SnowFormEvent + */ + SNOW_FORM (Category.BLOCK), +====1 +1:457a +2:526,532c +3:524,530c + * Called when a World's spawn is changed + * + * @see org.bukkit.event.world.SpawnChangeEvent + */ + SPAWN_CHANGE (Category.WORLD), + + /** +====3 +1:459c +2:534c + * +3:532c + * +====1 +1:468a +2:544,561c +3:542,559c + * ENTITY EVENTS + */ + + /** + * Called when a painting is placed by player + * + * @see org.bukkit.event.painting.PaintingCreateEvent + */ + PAINTING_PLACE (Category.ENTITY), + + /** + * Called when a painting is removed + * + * @see org.bukkit.event.painting.PaintingRemoveEvent + */ + PAINTING_BREAK (Category.ENTITY), + + /** +====3 +1:510c +2:603c + * +3:601c + * +====3 +1:513c +2:606c + * +3:604c + * +====3 +1:517c +2:610c + * +3:608c + * +====1 +1:529a +2:623,669c +3:621,667c + * Called when an entity interacts with a block + * This event specifically excludes player entities + * + * @see org.bukkit.event.entity.EntityInteractEvent + */ + ENTITY_INTERACT (Category.LIVING_ENTITY), + + /** + * Called when a creeper gains or loses a power shell + * + * @see org.bukkit.event.entity.CreeperPowerEvent + */ + CREEPER_POWER (Category.LIVING_ENTITY), + + /** + * Called when a pig is zapped, zombifying it + * + * @see org.bukkit.event.entity.PigZapEvent + */ + PIG_ZAP (Category.LIVING_ENTITY), + + /** + * WEATHER EVENTS + */ + + /** + * Called when a lightning entity strikes somewhere + * + * @see org.bukkit.event.weather.LightningStrikeEvent + */ + LIGHTNING_STRIKE (Category.WEATHER), + + /** + * Called when the weather in a world changes + * + * @see org.bukkit.event.weather.WeatherChangeEvent + */ + WEATHER_CHANGE (Category.WEATHER), + + /** + * Called when the thunder state in a world changes + * + * @see org.bukkit.event.weather.ThunderChangeEvent + */ + THUNDER_CHANGE (Category.WEATHER), + + /** +====1 +1:540a +2:681,687c +3:679,685c + * Called when a vehicle is destroyed + * + * @see org.bukkit.event.vehicle.VehicleDestroyEvent + */ + VEHICLE_DESTROY (Category.VEHICLE), + + /** diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports/diff_BlockListener.java.txt new file mode 
100644 index 0000000000..259f22b174 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports/diff_BlockListener.java.txt @@ -0,0 +1,60 @@ +====3 +1:3a +2:3a +3:4c + import org.bukkit.plugin.AuthorNagException; +====3 +1:34a +2:34a +3:36c + * @throws BukkitAuthorNagException +====3 +1:36a +2:36a +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====3 +1:38a +2:38a +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:104,111c + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports_ignorespace/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports_ignorespace/diff_BlockListener.java.txt new file mode 100644 index 0000000000..259f22b174 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports_ignorespace/diff_BlockListener.java.txt @@ -0,0 +1,60 @@ +====3 +1:3a +2:3a +3:4c + import org.bukkit.plugin.AuthorNagException; +====3 +1:34a +2:34a +3:36c + * @throws BukkitAuthorNagException +====3 +1:36a +2:36a +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====3 +1:38a +2:38a +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:104,111c + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports_ignorespace/diff_Event.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports_ignorespace/diff_Event.java.txt new file mode 100644 index 0000000000..394e11589a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_ort_imports_ignorespace/diff_Event.java.txt @@ -0,0 +1,240 @@ +====3 +1:95a +2:95a +3:96,100c + * Represents Entity-based events + */ + ENTITY, + + /** +====3 +1:106c +2:106c + * Represents Vehicle-based events +3:111,116c + * Represents Weather-based events + */ + WEATHER, + + /** + * 
Vehicle-based events +====3 +1:155a +2:155a +3:166,172c + * Called when a player has just been authenticated + * + * @see org.bukkit.event.player.PlayerPreLoginEvent + */ + PLAYER_PRELOGIN (Category.PLAYER), + + /** +====3 +1:218a +2:218a +3:236,242c + * Called when a player right clicks an entity + * + * @see org.bukkit.event.player.PlayerInteractEntityEvent + */ + PLAYER_INTERACT_ENTITY (Category.PLAYER), + + /** +====3 +1:255c +2:255c + * +3:279c + * +====3 +1:262c +2:262c + * +3:286c + * +====3 +1:267a +2:267a +3:292,312c + * Called when a player interacts with the inventory + * + * @see org.bukkit.event.player.PlayerInventoryEvent + */ + PLAYER_INVENTORY(Category.PLAYER), + + /** + * Called when a player enter a bed + * + * @see org.bukkit.event.player.PlayerBedEnterEvent + */ + PLAYER_BED_ENTER(Category.PLAYER), + + /** + * Called when a player leaves a bed + * + * @see org.bukkit.event.player.PlayerBedEnterEvent + */ + PLAYER_BED_LEAVE(Category.PLAYER), + + /** +====3 +1:321a +2:321a +3:367,373c + * Called when a block dispenses something + * + * @see org.bukkit.event.block.BlockPlaceEvent + */ + BLOCK_DISPENSE (Category.BLOCK), + + /** +==== +1:357,358c + + /** +2:357,365c + + /** + * Called when a block is successfully smelted (fully) in a furnace. + * + * @see org.bukkit.event.block.FurnaceSmeltEvent + */ + FURNACE_SMELT (Category.BLOCK), + + /** +3:409,424c + + /** + * Called when a block is successfully smelted (fully) in a furnace. + * + * @see org.bukkit.event.block.FurnaceSmeltEvent + */ + FURNACE_SMELT (Category.BLOCK), + + /** + * Called when world attempts to place a snow block during a snowfall + * + * @see org.bukkit.event.block.SnowFormEvent + */ + SNOW_FORM (Category.BLOCK), + + /** +====3 +1:457a +2:464a +3:524,530c + * Called when a World's spawn is changed + * + * @see org.bukkit.event.world.SpawnChangeEvent + */ + SPAWN_CHANGE (Category.WORLD), + + /** +====3 +1:459c +2:466c + * +3:532c + * +====3 +1:468a +2:475a +3:542,559c + * ENTITY EVENTS + */ + + /** + * Called when a painting is placed by player + * + * @see org.bukkit.event.painting.PaintingCreateEvent + */ + PAINTING_PLACE (Category.ENTITY), + + /** + * Called when a painting is removed + * + * @see org.bukkit.event.painting.PaintingRemoveEvent + */ + PAINTING_BREAK (Category.ENTITY), + + /** +====3 +1:510c +2:517c + * +3:601c + * +====3 +1:513c +2:520c + * +3:604c + * +====3 +1:517c +2:524c + * +3:608c + * +====3 +1:529a +2:536a +3:621,667c + * Called when an entity interacts with a block + * This event specifically excludes player entities + * + * @see org.bukkit.event.entity.EntityInteractEvent + */ + ENTITY_INTERACT (Category.LIVING_ENTITY), + + /** + * Called when a creeper gains or loses a power shell + * + * @see org.bukkit.event.entity.CreeperPowerEvent + */ + CREEPER_POWER (Category.LIVING_ENTITY), + + /** + * Called when a pig is zapped, zombifying it + * + * @see org.bukkit.event.entity.PigZapEvent + */ + PIG_ZAP (Category.LIVING_ENTITY), + + /** + * WEATHER EVENTS + */ + + /** + * Called when a lightning entity strikes somewhere + * + * @see org.bukkit.event.weather.LightningStrikeEvent + */ + LIGHTNING_STRIKE (Category.WEATHER), + + /** + * Called when the weather in a world changes + * + * @see org.bukkit.event.weather.WeatherChangeEvent + */ + WEATHER_CHANGE (Category.WEATHER), + + /** + * Called when the thunder state in a world changes + * + * @see org.bukkit.event.weather.ThunderChangeEvent + */ + THUNDER_CHANGE (Category.WEATHER), + + /** +====3 +1:540a +2:547a +3:679,685c + * Called 
when a vehicle is destroyed + * + * @see org.bukkit.event.vehicle.VehicleDestroyEvent + */ + VEHICLE_DESTROY (Category.VEHICLE), + + /** diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_histogram/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_histogram/diff_BlockListener.java.txt new file mode 100644 index 0000000000..aa758449c2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_histogram/diff_BlockListener.java.txt @@ -0,0 +1,77 @@ +====1 +1:3a +2:4c +3:4c + import org.bukkit.plugin.AuthorNagException; +====1 +1:34a +2:36c +3:36c + * @throws BukkitAuthorNagException +====1 +1:36a +2:39,40c +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====1 +1:38a +2:43,45c +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:111,135c + + /** + <<<<<<< HEAD + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + ||||||| 325fbdc0 + ======= + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + >>>>>>> TEMP_RIGHT_BRANCH + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_ignorespace/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_ignorespace/diff_BlockListener.java.txt new file mode 100644 index 0000000000..aa758449c2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_ignorespace/diff_BlockListener.java.txt @@ -0,0 +1,77 @@ +====1 +1:3a +2:4c +3:4c + import org.bukkit.plugin.AuthorNagException; +====1 +1:34a +2:36c +3:36c + * @throws BukkitAuthorNagException +====1 +1:36a +2:39,40c +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====1 +1:38a +2:43,45c +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:111,135c + + /** + <<<<<<< HEAD + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + ||||||| 325fbdc0 + ======= + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent 
event) { + >>>>>>> TEMP_RIGHT_BRANCH + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_ignorespace/diff_Event.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_ignorespace/diff_Event.java.txt new file mode 100644 index 0000000000..f010a669f8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_ignorespace/diff_Event.java.txt @@ -0,0 +1,243 @@ +====1 +1:95a +2:96,100c +3:96,100c + * Represents Entity-based events + */ + ENTITY, + + /** +====1 +1:106c + * Represents Vehicle-based events +2:111,116c +3:111,116c + * Represents Weather-based events + */ + WEATHER, + + /** + * Vehicle-based events +====1 +1:155a +2:166,172c +3:166,172c + * Called when a player has just been authenticated + * + * @see org.bukkit.event.player.PlayerPreLoginEvent + */ + PLAYER_PRELOGIN (Category.PLAYER), + + /** +====1 +1:218a +2:236,242c +3:236,242c + * Called when a player right clicks an entity + * + * @see org.bukkit.event.player.PlayerInteractEntityEvent + */ + PLAYER_INTERACT_ENTITY (Category.PLAYER), + + /** +====3 +1:255c +2:279c + * +3:279c + * +====3 +1:262c +2:286c + * +3:286c + * +====1 +1:267a +2:292,312c +3:292,312c + * Called when a player interacts with the inventory + * + * @see org.bukkit.event.player.PlayerInventoryEvent + */ + PLAYER_INVENTORY(Category.PLAYER), + + /** + * Called when a player enter a bed + * + * @see org.bukkit.event.player.PlayerBedEnterEvent + */ + PLAYER_BED_ENTER(Category.PLAYER), + + /** + * Called when a player leaves a bed + * + * @see org.bukkit.event.player.PlayerBedEnterEvent + */ + PLAYER_BED_LEAVE(Category.PLAYER), + + /** +====1 +1:321a +2:367,373c +3:367,373c + * Called when a block dispenses something + * + * @see org.bukkit.event.block.BlockPlaceEvent + */ + BLOCK_DISPENSE (Category.BLOCK), + + /** +==== +1:356a +2:409,424c + + /** + <<<<<<< HEAD + * Called when a block is successfully smelted (fully) in a furnace. + * + * @see org.bukkit.event.block.FurnaceSmeltEvent + */ + FURNACE_SMELT (Category.BLOCK), + ||||||| 325fbdc0 + ======= + * Called when world attempts to place a snow block during a snowfall + * + * @see org.bukkit.event.block.SnowFormEvent + */ + SNOW_FORM (Category.BLOCK), + >>>>>>> TEMP_RIGHT_BRANCH +3:409,422c + + /** + * Called when a block is successfully smelted (fully) in a furnace. 
+ * + * @see org.bukkit.event.block.FurnaceSmeltEvent + */ + FURNACE_SMELT (Category.BLOCK), + + /** + * Called when world attempts to place a snow block during a snowfall + * + * @see org.bukkit.event.block.SnowFormEvent + */ + SNOW_FORM (Category.BLOCK), +====1 +1:457a +2:526,532c +3:524,530c + * Called when a World's spawn is changed + * + * @see org.bukkit.event.world.SpawnChangeEvent + */ + SPAWN_CHANGE (Category.WORLD), + + /** +====3 +1:459c +2:534c + * +3:532c + * +====1 +1:468a +2:544,561c +3:542,559c + * ENTITY EVENTS + */ + + /** + * Called when a painting is placed by player + * + * @see org.bukkit.event.painting.PaintingCreateEvent + */ + PAINTING_PLACE (Category.ENTITY), + + /** + * Called when a painting is removed + * + * @see org.bukkit.event.painting.PaintingRemoveEvent + */ + PAINTING_BREAK (Category.ENTITY), + + /** +====3 +1:510c +2:603c + * +3:601c + * +====3 +1:513c +2:606c + * +3:604c + * +====3 +1:517c +2:610c + * +3:608c + * +====1 +1:529a +2:623,669c +3:621,667c + * Called when an entity interacts with a block + * This event specifically excludes player entities + * + * @see org.bukkit.event.entity.EntityInteractEvent + */ + ENTITY_INTERACT (Category.LIVING_ENTITY), + + /** + * Called when a creeper gains or loses a power shell + * + * @see org.bukkit.event.entity.CreeperPowerEvent + */ + CREEPER_POWER (Category.LIVING_ENTITY), + + /** + * Called when a pig is zapped, zombifying it + * + * @see org.bukkit.event.entity.PigZapEvent + */ + PIG_ZAP (Category.LIVING_ENTITY), + + /** + * WEATHER EVENTS + */ + + /** + * Called when a lightning entity strikes somewhere + * + * @see org.bukkit.event.weather.LightningStrikeEvent + */ + LIGHTNING_STRIKE (Category.WEATHER), + + /** + * Called when the weather in a world changes + * + * @see org.bukkit.event.weather.WeatherChangeEvent + */ + WEATHER_CHANGE (Category.WEATHER), + + /** + * Called when the thunder state in a world changes + * + * @see org.bukkit.event.weather.ThunderChangeEvent + */ + THUNDER_CHANGE (Category.WEATHER), + + /** +====1 +1:540a +2:681,687c +3:679,685c + * Called when a vehicle is destroyed + * + * @see org.bukkit.event.vehicle.VehicleDestroyEvent + */ + VEHICLE_DESTROY (Category.VEHICLE), + + /** diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_minimal/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_minimal/diff_BlockListener.java.txt new file mode 100644 index 0000000000..aa758449c2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_minimal/diff_BlockListener.java.txt @@ -0,0 +1,77 @@ +====1 +1:3a +2:4c +3:4c + import org.bukkit.plugin.AuthorNagException; +====1 +1:34a +2:36c +3:36c + * @throws BukkitAuthorNagException +====1 +1:36a +2:39,40c +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====1 +1:38a +2:43,45c +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:111,135c + + /** + <<<<<<< HEAD + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + ||||||| 325fbdc0 + ======= + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param 
event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + >>>>>>> TEMP_RIGHT_BRANCH + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_myers/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_myers/diff_BlockListener.java.txt new file mode 100644 index 0000000000..aa758449c2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_myers/diff_BlockListener.java.txt @@ -0,0 +1,77 @@ +====1 +1:3a +2:4c +3:4c + import org.bukkit.plugin.AuthorNagException; +====1 +1:34a +2:36c +3:36c + * @throws BukkitAuthorNagException +====1 +1:36a +2:39,40c +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====1 +1:38a +2:43,45c +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:111,135c + + /** + <<<<<<< HEAD + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + ||||||| 325fbdc0 + ======= + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + >>>>>>> TEMP_RIGHT_BRANCH + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_patience/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_patience/diff_BlockListener.java.txt new file mode 100644 index 0000000000..aa758449c2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/gitmerge_recursive_patience/diff_BlockListener.java.txt @@ -0,0 +1,77 @@ +====1 +1:3a +2:4c +3:4c + import org.bukkit.plugin.AuthorNagException; +====1 +1:34a +2:36c +3:36c + * @throws BukkitAuthorNagException +====1 +1:36a +2:39,40c +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====1 +1:38a +2:43,45c +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:111,135c + + /** + <<<<<<< HEAD + * Called when a 
furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + ||||||| 325fbdc0 + ======= + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + >>>>>>> TEMP_RIGHT_BRANCH + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/intellimerge/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/intellimerge/diff_BlockListener.java.txt new file mode 100644 index 0000000000..ffa9b7c7a2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/intellimerge/diff_BlockListener.java.txt @@ -0,0 +1,80 @@ +====1 +1:3a +2:4c +3:4c + import org.bukkit.plugin.AuthorNagException; +====1 +1:34a +2:36c +3:36c + * @throws BukkitAuthorNagException +====1 +1:36a +2:39,40c +3:39,40c + onBlockFlow(event); + throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====1 +1:38a +2:43,45c +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:111,138c + <<<<<<< HEAD + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + ||||||| 325fbdc0 + ======= + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + >>>>>>> TEMP_RIGHT_BRANCH +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/spork/diff_BlockListener.java.txt b/src/python/merge_conflict_analysis_diffs/354/spork/diff_BlockListener.java.txt new file mode 100644 index 0000000000..259f22b174 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/spork/diff_BlockListener.java.txt @@ -0,0 +1,60 @@ +====3 +1:3a +2:3a +3:4c + import org.bukkit.plugin.AuthorNagException; +====3 +1:34a +2:34a +3:36c + * @throws BukkitAuthorNagException +====3 +1:36a +2:36a +3:39,40c + onBlockFlow(event); + 
throw new AuthorNagException("onBlockFlow has been deprecated, use onBlockFromTo"); +====3 +1:38a +2:38a +3:43,45c + // Prevent compilation of old signatures TODO: Remove after 1.4 + @Deprecated public void onBlockFlow(BlockFromToEvent event) {} + +==== +1:103a +2:104,111c + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } +3:111,135c + + /** + * Called when a world is attempting to place a block during a snowfall + * + * @param event Relevant event details + */ + public void onSnowForm(SnowFormEvent event) { + } + + /** + * Called when a block is dispensing an item + * + * @param event Relevant event details + */ + public void onBlockDispense(BlockDispenseEvent event) { + } + + /** + * Called when a furnace successfully smelts an item + * + * @param event Relevant event details + */ + public void onFurnaceSmelt(FurnaceSmeltEvent event) { + } + diff --git a/src/python/merge_conflict_analysis_diffs/354/spork/diff_Event.java.txt b/src/python/merge_conflict_analysis_diffs/354/spork/diff_Event.java.txt new file mode 100644 index 0000000000..394e11589a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/spork/diff_Event.java.txt @@ -0,0 +1,240 @@ +====3 +1:95a +2:95a +3:96,100c + * Represents Entity-based events + */ + ENTITY, + + /** +====3 +1:106c +2:106c + * Represents Vehicle-based events +3:111,116c + * Represents Weather-based events + */ + WEATHER, + + /** + * Vehicle-based events +====3 +1:155a +2:155a +3:166,172c + * Called when a player has just been authenticated + * + * @see org.bukkit.event.player.PlayerPreLoginEvent + */ + PLAYER_PRELOGIN (Category.PLAYER), + + /** +====3 +1:218a +2:218a +3:236,242c + * Called when a player right clicks an entity + * + * @see org.bukkit.event.player.PlayerInteractEntityEvent + */ + PLAYER_INTERACT_ENTITY (Category.PLAYER), + + /** +====3 +1:255c +2:255c + * +3:279c + * +====3 +1:262c +2:262c + * +3:286c + * +====3 +1:267a +2:267a +3:292,312c + * Called when a player interacts with the inventory + * + * @see org.bukkit.event.player.PlayerInventoryEvent + */ + PLAYER_INVENTORY(Category.PLAYER), + + /** + * Called when a player enter a bed + * + * @see org.bukkit.event.player.PlayerBedEnterEvent + */ + PLAYER_BED_ENTER(Category.PLAYER), + + /** + * Called when a player leaves a bed + * + * @see org.bukkit.event.player.PlayerBedEnterEvent + */ + PLAYER_BED_LEAVE(Category.PLAYER), + + /** +====3 +1:321a +2:321a +3:367,373c + * Called when a block dispenses something + * + * @see org.bukkit.event.block.BlockPlaceEvent + */ + BLOCK_DISPENSE (Category.BLOCK), + + /** +==== +1:357,358c + + /** +2:357,365c + + /** + * Called when a block is successfully smelted (fully) in a furnace. + * + * @see org.bukkit.event.block.FurnaceSmeltEvent + */ + FURNACE_SMELT (Category.BLOCK), + + /** +3:409,424c + + /** + * Called when a block is successfully smelted (fully) in a furnace. 
+ * + * @see org.bukkit.event.block.FurnaceSmeltEvent + */ + FURNACE_SMELT (Category.BLOCK), + + /** + * Called when world attempts to place a snow block during a snowfall + * + * @see org.bukkit.event.block.SnowFormEvent + */ + SNOW_FORM (Category.BLOCK), + + /** +====3 +1:457a +2:464a +3:524,530c + * Called when a World's spawn is changed + * + * @see org.bukkit.event.world.SpawnChangeEvent + */ + SPAWN_CHANGE (Category.WORLD), + + /** +====3 +1:459c +2:466c + * +3:532c + * +====3 +1:468a +2:475a +3:542,559c + * ENTITY EVENTS + */ + + /** + * Called when a painting is placed by player + * + * @see org.bukkit.event.painting.PaintingCreateEvent + */ + PAINTING_PLACE (Category.ENTITY), + + /** + * Called when a painting is removed + * + * @see org.bukkit.event.painting.PaintingRemoveEvent + */ + PAINTING_BREAK (Category.ENTITY), + + /** +====3 +1:510c +2:517c + * +3:601c + * +====3 +1:513c +2:520c + * +3:604c + * +====3 +1:517c +2:524c + * +3:608c + * +====3 +1:529a +2:536a +3:621,667c + * Called when an entity interacts with a block + * This event specifically excludes player entities + * + * @see org.bukkit.event.entity.EntityInteractEvent + */ + ENTITY_INTERACT (Category.LIVING_ENTITY), + + /** + * Called when a creeper gains or loses a power shell + * + * @see org.bukkit.event.entity.CreeperPowerEvent + */ + CREEPER_POWER (Category.LIVING_ENTITY), + + /** + * Called when a pig is zapped, zombifying it + * + * @see org.bukkit.event.entity.PigZapEvent + */ + PIG_ZAP (Category.LIVING_ENTITY), + + /** + * WEATHER EVENTS + */ + + /** + * Called when a lightning entity strikes somewhere + * + * @see org.bukkit.event.weather.LightningStrikeEvent + */ + LIGHTNING_STRIKE (Category.WEATHER), + + /** + * Called when the weather in a world changes + * + * @see org.bukkit.event.weather.WeatherChangeEvent + */ + WEATHER_CHANGE (Category.WEATHER), + + /** + * Called when the thunder state in a world changes + * + * @see org.bukkit.event.weather.ThunderChangeEvent + */ + THUNDER_CHANGE (Category.WEATHER), + + /** +====3 +1:540a +2:547a +3:679,685c + * Called when a vehicle is destroyed + * + * @see org.bukkit.event.vehicle.VehicleDestroyEvent + */ + VEHICLE_DESTROY (Category.VEHICLE), + + /** diff --git a/src/python/merge_conflict_analysis_diffs/354/spork/diff_JavaPluginLoader.java.txt b/src/python/merge_conflict_analysis_diffs/354/spork/diff_JavaPluginLoader.java.txt new file mode 100644 index 0000000000..d2ac11221b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/354/spork/diff_JavaPluginLoader.java.txt @@ -0,0 +1,279 @@ +====3 +1:14a +2:14a +3:15c + import java.util.logging.Level; +====3 +1:20a +2:20a +3:22c + import org.bukkit.event.painting.*; +====3 +1:25a +2:25a +3:28c + import org.bukkit.event.weather.*; +====3 +1:26a +2:26a +3:30c + import org.yaml.snakeyaml.error.YAMLException; +====3 +1:43a +2:43a +3:48,51c + return loadPlugin(file, false); + } + + public Plugin loadPlugin(File file, boolean ignoreSoftDependencies) throws InvalidPluginException, InvalidDescriptionException, UnknownDependencyException { +====3 +1:64a +2:64a +3:73,74c + } catch (YAMLException ex) { + throw new InvalidPluginException(ex); +====3 +1:67c +2:67c + File dataFolder = getDataFolder(file); +3:77,111c + File dataFolder = new File(file.getParentFile(), description.getName()); + File oldDataFolder = getDataFolder(file); + + // Found old data folder + if (dataFolder.equals(oldDataFolder)) { + // They are equal -- nothing needs to be done! 
+ } else if (dataFolder.isDirectory() && oldDataFolder.isDirectory()) { + server.getLogger().log( Level.INFO, String.format( + "While loading %s (%s) found old-data folder: %s next to the new one: %s", + description.getName(), + file, + oldDataFolder, + dataFolder + )); + } else if (oldDataFolder.isDirectory() && !dataFolder.exists()) { + if (!oldDataFolder.renameTo(dataFolder)) { + throw new InvalidPluginException(new Exception("Unable to rename old data folder: '" + oldDataFolder + "' to: '" + dataFolder + "'")); + } + server.getLogger().log( Level.INFO, String.format( + "While loading %s (%s) renamed data folder: '%s' to '%s'", + description.getName(), + file, + oldDataFolder, + dataFolder + )); + } + + if (dataFolder.exists() && !dataFolder.isDirectory()) { + throw new InvalidPluginException(new Exception(String.format( + "Projected datafolder: '%s' for %s (%s) exists and is not a directory", + dataFolder, + description.getName(), + file + ))); + } +====3 +1:88a +2:88a +3:133,154c + if (!ignoreSoftDependencies) { + ArrayList softDepend; + try { + softDepend = (ArrayList)description.getSoftDepend(); + if (softDepend == null) { + softDepend = new ArrayList(); + } + } catch (ClassCastException ex) { + throw new InvalidPluginException(ex); + } + + for (String pluginName : softDepend) { + if (loaders == null) { + throw new UnknownSoftDependencyException(pluginName); + } + PluginClassLoader current = loaders.get(pluginName); + if (current == null) { + throw new UnknownSoftDependencyException(pluginName); + } + } + } + +====3 +1:154c +2:154c + if(!classes.containsKey(name)) { +3:220c + if (!classes.containsKey(name)) { +====3 +1:216a +2:216a +3:283,288c + case PLAYER_INTERACT_ENTITY: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((PlayerListener) listener).onPlayerInteractEntity((PlayerInteractEntityEvent) event); + } + }; +====3 +1:222a +2:222a +3:295,300c + case PLAYER_PRELOGIN: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((PlayerListener) listener).onPlayerPreLogin((PlayerPreLoginEvent) event); + } + }; +====3 +1:276a +2:276a +3:355,366c + case PLAYER_BED_ENTER: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((PlayerListener) listener).onPlayerBedEnter((PlayerBedEnterEvent) event); + } + }; + case PLAYER_BED_LEAVE: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((PlayerListener) listener).onPlayerBedLeave((PlayerBedLeaveEvent) event); + } + }; +====1 +1:314a +2:315,320c +3:405,410c + case FURNACE_SMELT: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((BlockListener) listener).onFurnaceSmelt((FurnaceSmeltEvent) event); + } + }; +====3 +1:344a +2:350a +3:441,452c + case SNOW_FORM: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((BlockListener) listener).onSnowForm((SnowFormEvent) event); + } + }; + case BLOCK_DISPENSE: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((BlockListener) listener).onBlockDispense((BlockDispenseEvent) event); + } + }; +====3 +1:378a +2:384a +3:487,492c + case SPAWN_CHANGE: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((WorldListener) listener).onSpawnChange((SpawnChangeEvent) event); + } + }; +====3 +1:391a +2:397a +3:506,519c + //Painting Events + case PAINTING_PLACE: + return new EventExecutor() { + public void execute(Listener 
listener, Event event) { + ((EntityListener) listener).onPaintingPlace((PaintingPlaceEvent) event); + } + }; + case PAINTING_BREAK: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((EntityListener) listener).onPaintingBreak((PaintingBreakEvent) event); + } + }; + +====3 +1:428a +2:434a +3:557,562c + case ENTITY_INTERACT: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((EntityListener) listener).onEntityInteract((EntityInteractEvent) event); + } + }; +====3 +1:434a +2:440a +3:569,580c + case PIG_ZAP: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((EntityListener) listener).onPigZap((PigZapEvent) event); + } + }; + case CREEPER_POWER: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((EntityListener) listener).onCreeperPower((CreeperPowerEvent) event); + } + }; +====3 +1:448a +2:454a +3:595,599c + case VEHICLE_DESTROY: + return new EventExecutor() { public void execute( Listener listener, Event event ) { + ((VehicleListener)listener).onVehicleDestroy( (VehicleDestroyEvent)event ); + } + }; +====3 +1:485a +2:491a +3:637,656c + // Weather Events + case WEATHER_CHANGE: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((WeatherListener) listener).onWeatherChange((WeatherChangeEvent) event); + } + }; + case THUNDER_CHANGE: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((WeatherListener) listener).onThunderChange((ThunderChangeEvent) event); + } + }; + case LIGHTNING_STRIKE: + return new EventExecutor() { + public void execute(Listener listener, Event event) { + ((WeatherListener) listener).onLightningStrike((LightningStrikeEvent) event); + } + }; + +====3 +1:511c +2:517c + jPlugin.setEnabled(true); +3:682,690c + try { + jPlugin.setEnabled(true); + } catch (Throwable ex) { + server.getLogger().log(Level.SEVERE, "Error occurred while enabling " + plugin.getDescription().getFullName() + " (Is it up to date?): " + ex.getMessage(), ex); + } + + // Perhaps abort here, rather than continue going, but as it stands, + // an abort is not possible the way it's currently written + +====3 +1:525c +2:531c + jPlugin.setEnabled(false); +3:704,708c + try { + jPlugin.setEnabled(false); + } catch (Throwable ex) { + server.getLogger().log(Level.SEVERE, "Error occurred while disabling " + plugin.getDescription().getFullName() + " (Is it up to date?): " + ex.getMessage(), ex); + } diff --git a/src/python/merge_conflict_analysis_diffs/427/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..b8eceb3fdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,27 @@ +====3 +1:9c +2:9c + 0.4.3-SNAPSHOT +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====1 +1:51a +2:54,55c +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:87,91c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..eaaf146419 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,39 @@ +====1 +1:9c + 0.4.3-SNAPSHOT +2:9c +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + 
+====2 +1:50a +3:52a +2:53c + <<<<<<< HEAD +==== +1:51a +2:55,61c + webmagic-panel + webmagic-worker + ||||||| ac516f9b + webmagic-scripts/ + ======= + webmagic-samples/ + >>>>>>> TEMP_RIGHT_BRANCH +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:93,97c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..b8eceb3fdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,27 @@ +====3 +1:9c +2:9c + 0.4.3-SNAPSHOT +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====1 +1:51a +2:54,55c +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:87,91c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..eaaf146419 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,39 @@ +====1 +1:9c + 0.4.3-SNAPSHOT +2:9c +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====2 +1:50a +3:52a +2:53c + <<<<<<< HEAD +==== +1:51a +2:55,61c + webmagic-panel + webmagic-worker + ||||||| ac516f9b + webmagic-scripts/ + ======= + webmagic-samples/ + >>>>>>> TEMP_RIGHT_BRANCH +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:93,97c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..b8eceb3fdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,27 @@ +====3 +1:9c +2:9c + 0.4.3-SNAPSHOT +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====1 +1:51a +2:54,55c +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:87,91c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..b8eceb3fdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,27 @@ +====3 +1:9c +2:9c + 0.4.3-SNAPSHOT +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====1 +1:51a +2:54,55c +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:87,91c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..eaaf146419 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,39 @@ +====1 +1:9c + 0.4.3-SNAPSHOT +2:9c +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====2 +1:50a +3:52a +2:53c + <<<<<<< HEAD +==== +1:51a +2:55,61c + webmagic-panel + webmagic-worker + ||||||| ac516f9b + webmagic-scripts/ + ======= + 
webmagic-samples/ + >>>>>>> TEMP_RIGHT_BRANCH +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:93,97c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..eaaf146419 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,39 @@ +====1 +1:9c + 0.4.3-SNAPSHOT +2:9c +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====2 +1:50a +3:52a +2:53c + <<<<<<< HEAD +==== +1:51a +2:55,61c + webmagic-panel + webmagic-worker + ||||||| ac516f9b + webmagic-scripts/ + ======= + webmagic-samples/ + >>>>>>> TEMP_RIGHT_BRANCH +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:93,97c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..eaaf146419 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,39 @@ +====1 +1:9c + 0.4.3-SNAPSHOT +2:9c +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====2 +1:50a +3:52a +2:53c + <<<<<<< HEAD +==== +1:51a +2:55,61c + webmagic-panel + webmagic-worker + ||||||| ac516f9b + webmagic-scripts/ + ======= + webmagic-samples/ + >>>>>>> TEMP_RIGHT_BRANCH +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:93,97c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..eaaf146419 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,39 @@ +====1 +1:9c + 0.4.3-SNAPSHOT +2:9c +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====2 +1:50a +3:52a +2:53c + <<<<<<< HEAD +==== +1:51a +2:55,61c + webmagic-panel + webmagic-worker + ||||||| ac516f9b + webmagic-scripts/ + ======= + webmagic-samples/ + >>>>>>> TEMP_RIGHT_BRANCH +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:93,97c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..eaaf146419 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,39 @@ +====1 +1:9c + 0.4.3-SNAPSHOT +2:9c +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====2 +1:50a +3:52a +2:53c + <<<<<<< HEAD +==== +1:51a +2:55,61c + webmagic-panel + webmagic-worker + ||||||| ac516f9b + webmagic-scripts/ + ======= + webmagic-samples/ + >>>>>>> TEMP_RIGHT_BRANCH +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:93,97c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..eaaf146419 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/427/intellimerge/diff_pom.xml.txt @@ -0,0 +1,39 @@ +====1 +1:9c + 0.4.3-SNAPSHOT +2:9c +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====2 +1:50a +3:52a +2:53c + <<<<<<< HEAD +==== +1:51a +2:55,61c + webmagic-panel + webmagic-worker + ||||||| ac516f9b + webmagic-scripts/ + ======= + webmagic-samples/ + >>>>>>> TEMP_RIGHT_BRANCH +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:93,97c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/427/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/427/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..b8eceb3fdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/427/spork/diff_pom.xml.txt @@ -0,0 +1,27 @@ +====3 +1:9c +2:9c + 0.4.3-SNAPSHOT +3:9c + 0.4.2 +====1 +1:14a +2:15,16c +3:15,16c + 4.0.0.RELEASE + +====1 +1:51a +2:54,55c +3:54,55c + webmagic-panel + webmagic-worker +====1 +1:82a +2:87,91c +3:87,91c + com.alibaba + fastjson + 1.1.37 + + diff --git a/src/python/merge_conflict_analysis_diffs/464/git_hires_merge/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/git_hires_merge/diff_Faker.java.txt new file mode 100644 index 0000000000..71c96fb04a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/git_hires_merge/diff_Faker.java.txt @@ -0,0 +1,34 @@ +==== +1:99a +2:100c + private final BojackHorseman bojackHorseman; +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:207c + this.bojackHorseman = new BojackHorseman(this); +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:496c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:644,647c + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/git_hires_merge/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/git_hires_merge/diff_FakerIT.java.txt new file mode 100644 index 0000000000..768403923a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/git_hires_merge/diff_FakerIT.java.txt @@ -0,0 +1,7 @@ +==== +1:165a +2:166c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort/diff_Faker.java.txt new file mode 100644 index 0000000000..6e94f90989 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort/diff_Faker.java.txt @@ -0,0 +1,49 @@ +==== +1:99a +2:100,105c + <<<<<<< HEAD + private final BojackHorseman bojackHorseman; + ||||||| 1850b033 + ======= + private final Basketball basketball; + >>>>>>> TEMP_RIGHT_BRANCH +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:212,217c + <<<<<<< HEAD + this.bojackHorseman = new BojackHorseman(this); + ||||||| 1850b033 + ======= + this.basketball = new Basketball(this); + >>>>>>> TEMP_RIGHT_BRANCH +3:208,209c + 
this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:506c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:654,662c + <<<<<<< HEAD + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + + ||||||| 1850b033 + ======= + public Basketball basketball() { return basketball; } + >>>>>>> TEMP_RIGHT_BRANCH +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort/diff_FakerIT.java.txt new file mode 100644 index 0000000000..ede5262c29 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort/diff_FakerIT.java.txt @@ -0,0 +1,12 @@ +==== +1:165a +2:166,171c + <<<<<<< HEAD + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + ||||||| 1850b033 + ======= + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); + >>>>>>> TEMP_RIGHT_BRANCH +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_adjacent/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_adjacent/diff_Faker.java.txt new file mode 100644 index 0000000000..71c96fb04a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_adjacent/diff_Faker.java.txt @@ -0,0 +1,34 @@ +==== +1:99a +2:100c + private final BojackHorseman bojackHorseman; +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:207c + this.bojackHorseman = new BojackHorseman(this); +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:496c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:644,647c + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_adjacent/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_adjacent/diff_FakerIT.java.txt new file mode 100644 index 0000000000..768403923a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_adjacent/diff_FakerIT.java.txt @@ -0,0 +1,7 @@ +==== +1:165a +2:166c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_ignorespace/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_ignorespace/diff_Faker.java.txt new file mode 100644 index 0000000000..6e94f90989 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_ignorespace/diff_Faker.java.txt @@ -0,0 +1,49 @@ +==== +1:99a +2:100,105c + <<<<<<< HEAD + private final BojackHorseman bojackHorseman; + ||||||| 1850b033 + ======= + private final Basketball 
basketball; + >>>>>>> TEMP_RIGHT_BRANCH +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:212,217c + <<<<<<< HEAD + this.bojackHorseman = new BojackHorseman(this); + ||||||| 1850b033 + ======= + this.basketball = new Basketball(this); + >>>>>>> TEMP_RIGHT_BRANCH +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:506c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:654,662c + <<<<<<< HEAD + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + + ||||||| 1850b033 + ======= + public Basketball basketball() { return basketball; } + >>>>>>> TEMP_RIGHT_BRANCH +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_ignorespace/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_ignorespace/diff_FakerIT.java.txt new file mode 100644 index 0000000000..ede5262c29 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_ignorespace/diff_FakerIT.java.txt @@ -0,0 +1,12 @@ +==== +1:165a +2:166,171c + <<<<<<< HEAD + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + ||||||| 1850b033 + ======= + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); + >>>>>>> TEMP_RIGHT_BRANCH +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports/diff_Faker.java.txt new file mode 100644 index 0000000000..71c96fb04a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports/diff_Faker.java.txt @@ -0,0 +1,34 @@ +==== +1:99a +2:100c + private final BojackHorseman bojackHorseman; +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:207c + this.bojackHorseman = new BojackHorseman(this); +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:496c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:644,647c + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports/diff_FakerIT.java.txt new file mode 100644 index 0000000000..768403923a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports/diff_FakerIT.java.txt @@ -0,0 +1,7 @@ +==== +1:165a +2:166c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports_ignorespace/diff_Faker.java.txt 
b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports_ignorespace/diff_Faker.java.txt new file mode 100644 index 0000000000..71c96fb04a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports_ignorespace/diff_Faker.java.txt @@ -0,0 +1,34 @@ +==== +1:99a +2:100c + private final BojackHorseman bojackHorseman; +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:207c + this.bojackHorseman = new BojackHorseman(this); +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:496c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:644,647c + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports_ignorespace/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports_ignorespace/diff_FakerIT.java.txt new file mode 100644 index 0000000000..768403923a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_ort_imports_ignorespace/diff_FakerIT.java.txt @@ -0,0 +1,7 @@ +==== +1:165a +2:166c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_histogram/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_histogram/diff_Faker.java.txt new file mode 100644 index 0000000000..6e94f90989 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_histogram/diff_Faker.java.txt @@ -0,0 +1,49 @@ +==== +1:99a +2:100,105c + <<<<<<< HEAD + private final BojackHorseman bojackHorseman; + ||||||| 1850b033 + ======= + private final Basketball basketball; + >>>>>>> TEMP_RIGHT_BRANCH +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:212,217c + <<<<<<< HEAD + this.bojackHorseman = new BojackHorseman(this); + ||||||| 1850b033 + ======= + this.basketball = new Basketball(this); + >>>>>>> TEMP_RIGHT_BRANCH +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:506c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:654,662c + <<<<<<< HEAD + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + + ||||||| 1850b033 + ======= + public Basketball basketball() { return basketball; } + >>>>>>> TEMP_RIGHT_BRANCH +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_histogram/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_histogram/diff_FakerIT.java.txt new file mode 100644 index 0000000000..ede5262c29 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_histogram/diff_FakerIT.java.txt @@ -0,0 +1,12 @@ +==== +1:165a +2:166,171c + <<<<<<< HEAD + 
testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + ||||||| 1850b033 + ======= + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); + >>>>>>> TEMP_RIGHT_BRANCH +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_ignorespace/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_ignorespace/diff_Faker.java.txt new file mode 100644 index 0000000000..6e94f90989 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_ignorespace/diff_Faker.java.txt @@ -0,0 +1,49 @@ +==== +1:99a +2:100,105c + <<<<<<< HEAD + private final BojackHorseman bojackHorseman; + ||||||| 1850b033 + ======= + private final Basketball basketball; + >>>>>>> TEMP_RIGHT_BRANCH +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:212,217c + <<<<<<< HEAD + this.bojackHorseman = new BojackHorseman(this); + ||||||| 1850b033 + ======= + this.basketball = new Basketball(this); + >>>>>>> TEMP_RIGHT_BRANCH +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:506c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:654,662c + <<<<<<< HEAD + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + + ||||||| 1850b033 + ======= + public Basketball basketball() { return basketball; } + >>>>>>> TEMP_RIGHT_BRANCH +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_ignorespace/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_ignorespace/diff_FakerIT.java.txt new file mode 100644 index 0000000000..ede5262c29 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_ignorespace/diff_FakerIT.java.txt @@ -0,0 +1,12 @@ +==== +1:165a +2:166,171c + <<<<<<< HEAD + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + ||||||| 1850b033 + ======= + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); + >>>>>>> TEMP_RIGHT_BRANCH +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_minimal/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_minimal/diff_Faker.java.txt new file mode 100644 index 0000000000..6e94f90989 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_minimal/diff_Faker.java.txt @@ -0,0 +1,49 @@ +==== +1:99a +2:100,105c + <<<<<<< HEAD + private final BojackHorseman bojackHorseman; + ||||||| 1850b033 + ======= + private final Basketball basketball; + >>>>>>> TEMP_RIGHT_BRANCH +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:212,217c + <<<<<<< HEAD + this.bojackHorseman = new BojackHorseman(this); + ||||||| 1850b033 + ======= + this.basketball = new Basketball(this); + >>>>>>> TEMP_RIGHT_BRANCH +3:208,209c + this.bojackHorseman = new 
BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:506c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:654,662c + <<<<<<< HEAD + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + + ||||||| 1850b033 + ======= + public Basketball basketball() { return basketball; } + >>>>>>> TEMP_RIGHT_BRANCH +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_minimal/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_minimal/diff_FakerIT.java.txt new file mode 100644 index 0000000000..ede5262c29 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_minimal/diff_FakerIT.java.txt @@ -0,0 +1,12 @@ +==== +1:165a +2:166,171c + <<<<<<< HEAD + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + ||||||| 1850b033 + ======= + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); + >>>>>>> TEMP_RIGHT_BRANCH +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_myers/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_myers/diff_Faker.java.txt new file mode 100644 index 0000000000..6e94f90989 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_myers/diff_Faker.java.txt @@ -0,0 +1,49 @@ +==== +1:99a +2:100,105c + <<<<<<< HEAD + private final BojackHorseman bojackHorseman; + ||||||| 1850b033 + ======= + private final Basketball basketball; + >>>>>>> TEMP_RIGHT_BRANCH +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:212,217c + <<<<<<< HEAD + this.bojackHorseman = new BojackHorseman(this); + ||||||| 1850b033 + ======= + this.basketball = new Basketball(this); + >>>>>>> TEMP_RIGHT_BRANCH +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:506c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:654,662c + <<<<<<< HEAD + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + + ||||||| 1850b033 + ======= + public Basketball basketball() { return basketball; } + >>>>>>> TEMP_RIGHT_BRANCH +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_myers/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_myers/diff_FakerIT.java.txt new file mode 100644 index 0000000000..ede5262c29 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_myers/diff_FakerIT.java.txt @@ -0,0 +1,12 @@ +==== +1:165a +2:166,171c + <<<<<<< HEAD + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + ||||||| 1850b033 + ======= + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); + >>>>>>> TEMP_RIGHT_BRANCH +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + 
testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_patience/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_patience/diff_Faker.java.txt new file mode 100644 index 0000000000..6e94f90989 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_patience/diff_Faker.java.txt @@ -0,0 +1,49 @@ +==== +1:99a +2:100,105c + <<<<<<< HEAD + private final BojackHorseman bojackHorseman; + ||||||| 1850b033 + ======= + private final Basketball basketball; + >>>>>>> TEMP_RIGHT_BRANCH +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:212,217c + <<<<<<< HEAD + this.bojackHorseman = new BojackHorseman(this); + ||||||| 1850b033 + ======= + this.basketball = new Basketball(this); + >>>>>>> TEMP_RIGHT_BRANCH +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:506c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:654,662c + <<<<<<< HEAD + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + + ||||||| 1850b033 + ======= + public Basketball basketball() { return basketball; } + >>>>>>> TEMP_RIGHT_BRANCH +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_patience/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_patience/diff_FakerIT.java.txt new file mode 100644 index 0000000000..ede5262c29 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/gitmerge_recursive_patience/diff_FakerIT.java.txt @@ -0,0 +1,12 @@ +==== +1:165a +2:166,171c + <<<<<<< HEAD + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + ||||||| 1850b033 + ======= + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); + >>>>>>> TEMP_RIGHT_BRANCH +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/intellimerge/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/intellimerge/diff_Faker.java.txt new file mode 100644 index 0000000000..6e94f90989 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/intellimerge/diff_Faker.java.txt @@ -0,0 +1,49 @@ +==== +1:99a +2:100,105c + <<<<<<< HEAD + private final BojackHorseman bojackHorseman; + ||||||| 1850b033 + ======= + private final Basketball basketball; + >>>>>>> TEMP_RIGHT_BRANCH +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:212,217c + <<<<<<< HEAD + this.bojackHorseman = new BojackHorseman(this); + ||||||| 1850b033 + ======= + this.basketball = new Basketball(this); + >>>>>>> TEMP_RIGHT_BRANCH +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:506c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:654,662c + <<<<<<< HEAD + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + + ||||||| 1850b033 + ======= + public Basketball basketball() { return basketball; } + 
>>>>>>> TEMP_RIGHT_BRANCH +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/intellimerge/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/intellimerge/diff_FakerIT.java.txt new file mode 100644 index 0000000000..ede5262c29 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/intellimerge/diff_FakerIT.java.txt @@ -0,0 +1,12 @@ +==== +1:165a +2:166,171c + <<<<<<< HEAD + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + ||||||| 1850b033 + ======= + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); + >>>>>>> TEMP_RIGHT_BRANCH +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/464/spork/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/464/spork/diff_Faker.java.txt new file mode 100644 index 0000000000..71c96fb04a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/spork/diff_Faker.java.txt @@ -0,0 +1,34 @@ +==== +1:99a +2:100c + private final BojackHorseman bojackHorseman; +3:100,101c + private final BojackHorseman bojackHorseman; + private final Basketball basketball; +==== +1:205a +2:207c + this.bojackHorseman = new BojackHorseman(this); +3:208,209c + this.bojackHorseman = new BojackHorseman(this); + this.basketball = new Basketball(this); +====1 +1:494,496c + public Beer beer() { + return beer; + } +2:496c +3:498c + public Beer beer() { return beer; } +==== +1:643a +2:644,647c + + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + +3:646,649c + public BojackHorseman bojackHorseman() { return bojackHorseman; } + + public Basketball basketball() { return basketball; } + diff --git a/src/python/merge_conflict_analysis_diffs/464/spork/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/464/spork/diff_FakerIT.java.txt new file mode 100644 index 0000000000..768403923a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/464/spork/diff_FakerIT.java.txt @@ -0,0 +1,7 @@ +==== +1:165a +2:166c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); +3:166,167c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.bojackHorseman()); + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.basketball()); diff --git a/src/python/merge_conflict_analysis_diffs/485/git_hires_merge/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/git_hires_merge/diff_README.md.txt new file mode 100644 index 0000000000..a46efed48d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/git_hires_merge/diff_README.md.txt @@ -0,0 +1,39 @@ +====3 +1:20c +2:20c + 0.18 +3:20c + 1.0.0 +====3 +1:28c +2:28c + implementation 'com.github.javafaker:javafaker:0.18' +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====3 +1:44a +2:44a +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. 
+ +====3 +1:49a +2:49a +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:57c + * Back To The Future +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:102c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort/diff_README.md.txt new file mode 100644 index 0000000000..03f94f7c50 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort/diff_README.md.txt @@ -0,0 +1,44 @@ +====1 +1:20c + 0.18 +2:20c +3:20c + 1.0.0 +====1 +1:28c + implementation 'com.github.javafaker:javafaker:0.18' +2:28c +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====1 +1:44a +2:45,46c +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. + +====1 +1:49a +2:52,56c +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:64,69c + <<<<<<< HEAD + * Back To The Future + ||||||| 506d0729 + ======= + * Aviation + >>>>>>> TEMP_RIGHT_BRANCH +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:114c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_adjacent/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_adjacent/diff_README.md.txt new file mode 100644 index 0000000000..a46efed48d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_adjacent/diff_README.md.txt @@ -0,0 +1,39 @@ +====3 +1:20c +2:20c + 0.18 +3:20c + 1.0.0 +====3 +1:28c +2:28c + implementation 'com.github.javafaker:javafaker:0.18' +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====3 +1:44a +2:44a +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. + +====3 +1:49a +2:49a +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:57c + * Back To The Future +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:102c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_ignorespace/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_ignorespace/diff_README.md.txt new file mode 100644 index 0000000000..03f94f7c50 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_ignorespace/diff_README.md.txt @@ -0,0 +1,44 @@ +====1 +1:20c + 0.18 +2:20c +3:20c + 1.0.0 +====1 +1:28c + implementation 'com.github.javafaker:javafaker:0.18' +2:28c +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====1 +1:44a +2:45,46c +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. 
+ +====1 +1:49a +2:52,56c +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:64,69c + <<<<<<< HEAD + * Back To The Future + ||||||| 506d0729 + ======= + * Aviation + >>>>>>> TEMP_RIGHT_BRANCH +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:114c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_imports/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_imports/diff_README.md.txt new file mode 100644 index 0000000000..a46efed48d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_imports/diff_README.md.txt @@ -0,0 +1,39 @@ +====3 +1:20c +2:20c + 0.18 +3:20c + 1.0.0 +====3 +1:28c +2:28c + implementation 'com.github.javafaker:javafaker:0.18' +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====3 +1:44a +2:44a +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. + +====3 +1:49a +2:49a +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:57c + * Back To The Future +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:102c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_imports_ignorespace/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_imports_ignorespace/diff_README.md.txt new file mode 100644 index 0000000000..a46efed48d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_ort_imports_ignorespace/diff_README.md.txt @@ -0,0 +1,39 @@ +====3 +1:20c +2:20c + 0.18 +3:20c + 1.0.0 +====3 +1:28c +2:28c + implementation 'com.github.javafaker:javafaker:0.18' +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====3 +1:44a +2:44a +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. + +====3 +1:49a +2:49a +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:57c + * Back To The Future +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:102c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_histogram/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_histogram/diff_README.md.txt new file mode 100644 index 0000000000..03f94f7c50 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_histogram/diff_README.md.txt @@ -0,0 +1,44 @@ +====1 +1:20c + 0.18 +2:20c +3:20c + 1.0.0 +====1 +1:28c + implementation 'com.github.javafaker:javafaker:0.18' +2:28c +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====1 +1:44a +2:45,46c +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. 
+ +====1 +1:49a +2:52,56c +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:64,69c + <<<<<<< HEAD + * Back To The Future + ||||||| 506d0729 + ======= + * Aviation + >>>>>>> TEMP_RIGHT_BRANCH +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:114c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_ignorespace/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_ignorespace/diff_README.md.txt new file mode 100644 index 0000000000..03f94f7c50 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_ignorespace/diff_README.md.txt @@ -0,0 +1,44 @@ +====1 +1:20c + 0.18 +2:20c +3:20c + 1.0.0 +====1 +1:28c + implementation 'com.github.javafaker:javafaker:0.18' +2:28c +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====1 +1:44a +2:45,46c +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. + +====1 +1:49a +2:52,56c +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:64,69c + <<<<<<< HEAD + * Back To The Future + ||||||| 506d0729 + ======= + * Aviation + >>>>>>> TEMP_RIGHT_BRANCH +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:114c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_minimal/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_minimal/diff_README.md.txt new file mode 100644 index 0000000000..03f94f7c50 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_minimal/diff_README.md.txt @@ -0,0 +1,44 @@ +====1 +1:20c + 0.18 +2:20c +3:20c + 1.0.0 +====1 +1:28c + implementation 'com.github.javafaker:javafaker:0.18' +2:28c +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====1 +1:44a +2:45,46c +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. + +====1 +1:49a +2:52,56c +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:64,69c + <<<<<<< HEAD + * Back To The Future + ||||||| 506d0729 + ======= + * Aviation + >>>>>>> TEMP_RIGHT_BRANCH +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:114c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_myers/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_myers/diff_README.md.txt new file mode 100644 index 0000000000..03f94f7c50 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_myers/diff_README.md.txt @@ -0,0 +1,44 @@ +====1 +1:20c + 0.18 +2:20c +3:20c + 1.0.0 +====1 +1:28c + implementation 'com.github.javafaker:javafaker:0.18' +2:28c +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====1 +1:44a +2:45,46c +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. 
+ +====1 +1:49a +2:52,56c +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:64,69c + <<<<<<< HEAD + * Back To The Future + ||||||| 506d0729 + ======= + * Aviation + >>>>>>> TEMP_RIGHT_BRANCH +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:114c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_patience/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_patience/diff_README.md.txt new file mode 100644 index 0000000000..03f94f7c50 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/gitmerge_recursive_patience/diff_README.md.txt @@ -0,0 +1,44 @@ +====1 +1:20c + 0.18 +2:20c +3:20c + 1.0.0 +====1 +1:28c + implementation 'com.github.javafaker:javafaker:0.18' +2:28c +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====1 +1:44a +2:45,46c +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. + +====1 +1:49a +2:52,56c +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:64,69c + <<<<<<< HEAD + * Back To The Future + ||||||| 506d0729 + ======= + * Aviation + >>>>>>> TEMP_RIGHT_BRANCH +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:114c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/intellimerge/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/intellimerge/diff_README.md.txt new file mode 100644 index 0000000000..03f94f7c50 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/intellimerge/diff_README.md.txt @@ -0,0 +1,44 @@ +====1 +1:20c + 0.18 +2:20c +3:20c + 1.0.0 +====1 +1:28c + implementation 'com.github.javafaker:javafaker:0.18' +2:28c +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====1 +1:44a +2:45,46c +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. 
+ +====1 +1:49a +2:52,56c +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:64,69c + <<<<<<< HEAD + * Back To The Future + ||||||| 506d0729 + ======= + * Aviation + >>>>>>> TEMP_RIGHT_BRANCH +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:114c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/485/spork/diff_Faker.java.txt b/src/python/merge_conflict_analysis_diffs/485/spork/diff_Faker.java.txt new file mode 100644 index 0000000000..630c7ecb73 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/spork/diff_Faker.java.txt @@ -0,0 +1,36 @@ +====3 +1:22a +2:22a +3:23c + private final Aviation aviation; +====1 +1:87a +2:88c +3:89c + private final PrincessBride princessBride; +====3 +1:108a +2:109a +3:111c + this.aviation = new Aviation(this); +====1 +1:173a +2:175c +3:177c + this.princessBride = new PrincessBride(this); +====3 +1:313a +2:315a +3:318,321c + public Aviation aviation() { + return aviation; + } + +====1 +1:565a +2:568,571c +3:574,577c + public PrincessBride princessBride() { + return princessBride; + } + diff --git a/src/python/merge_conflict_analysis_diffs/485/spork/diff_FakerIT.java.txt b/src/python/merge_conflict_analysis_diffs/485/spork/diff_FakerIT.java.txt new file mode 100644 index 0000000000..c5fdf40037 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/spork/diff_FakerIT.java.txt @@ -0,0 +1,10 @@ +====3 +1:96a +2:96a +3:97c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.aviation()); +====1 +1:153a +2:154c +3:155c + testAllMethodsThatReturnStringsActuallyReturnStrings(faker.princessBride()); diff --git a/src/python/merge_conflict_analysis_diffs/485/spork/diff_README.md.txt b/src/python/merge_conflict_analysis_diffs/485/spork/diff_README.md.txt new file mode 100644 index 0000000000..a46efed48d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/485/spork/diff_README.md.txt @@ -0,0 +1,39 @@ +====3 +1:20c +2:20c + 0.18 +3:20c + 1.0.0 +====3 +1:28c +2:28c + implementation 'com.github.javafaker:javafaker:0.18' +3:28c + implementation 'com.github.javafaker:javafaker:1.0.0' +====3 +1:44a +2:44a +3:45,46c + This is a [demo web application](https://java-faker.herokuapp.com/) that uses the library. 
+ +====3 +1:49a +2:49a +3:52,56c + Contributions + ------------- + See [CONTRIBUTING.md](https://github.com/DiUS/java-faker/blob/master/CONTRIBUTING.md) + + +==== +1:56a +2:57c + * Back To The Future +3:64,65c + * Back To The Future + * Aviation +====1 +1:100a +2:102c +3:110c + * Princess Bride diff --git a/src/python/merge_conflict_analysis_diffs/530/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..3f4f080256 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,145 @@ +====1 +1:7c + 4.1.24-SNAPSHOT +2:7c +3:7c + 4.2.2-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.5 + 11.0.5 +====1 +1:53c + 3.11.1 +2:61c +3:61c + 3.11.2 +====1 +1:54a +2:63c +3:63c + 1.3 +====1 +1:55a +2:65c +3:65c + 2.7.1 +====1 +1:124,158c + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + junit + junit + ${junit.version} + + + org.assertj + assertj-core + ${assertj.version} + + + org.mockito + mockito-core + ${mockito.version} + + + org.hamcrest + hamcrest-core + 1.3 + + + + +2:133a +3:133a +====1 +1:181a +2:157,196c +3:157,196c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:303c + 2.7.1 +2:318c +3:318c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..b0a7a7c0ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,152 @@ +==== +1:7c + 4.1.24-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 4.2.2-SNAPSHOT + ||||||| 7ff7af179 + 4.1.24-SNAPSHOT + ======= + 4.1.25-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 4.2.2-SNAPSHOT +====1 +1:20a +2:27c +3:21c + metrics-caffeine3 +====1 +1:28a +2:36,37c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:43c +3:37c + metrics-jersey3 +====1 +1:34a +2:45,46c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10 +2:61c +3:55c + 2.12.3 +====1 +1:50a +2:63,64c +3:57,58c + 10.0.5 + 11.0.5 +====1 +1:53c + 3.11.1 +2:67c +3:61c + 3.11.2 +====1 +1:54a +2:69c +3:63c + 1.3 +====1 +1:55a 
+2:71c +3:65c + 2.7.1 +====1 +1:124,158c + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + junit + junit + ${junit.version} + + + org.assertj + assertj-core + ${assertj.version} + + + org.mockito + mockito-core + ${mockito.version} + + + org.hamcrest + hamcrest-core + 1.3 + + + + +2:139a +3:133a +====1 +1:181a +2:163,202c +3:157,196c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:303c + 2.7.1 +2:324c +3:318c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..3f4f080256 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,145 @@ +====1 +1:7c + 4.1.24-SNAPSHOT +2:7c +3:7c + 4.2.2-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.5 + 11.0.5 +====1 +1:53c + 3.11.1 +2:61c +3:61c + 3.11.2 +====1 +1:54a +2:63c +3:63c + 1.3 +====1 +1:55a +2:65c +3:65c + 2.7.1 +====1 +1:124,158c + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + junit + junit + ${junit.version} + + + org.assertj + assertj-core + ${assertj.version} + + + org.mockito + mockito-core + ${mockito.version} + + + org.hamcrest + hamcrest-core + 1.3 + + + + +2:133a +3:133a +====1 +1:181a +2:157,196c +3:157,196c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED 
+ -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:303c + 2.7.1 +2:318c +3:318c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..b0a7a7c0ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,152 @@ +==== +1:7c + 4.1.24-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 4.2.2-SNAPSHOT + ||||||| 7ff7af179 + 4.1.24-SNAPSHOT + ======= + 4.1.25-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 4.2.2-SNAPSHOT +====1 +1:20a +2:27c +3:21c + metrics-caffeine3 +====1 +1:28a +2:36,37c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:43c +3:37c + metrics-jersey3 +====1 +1:34a +2:45,46c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10 +2:61c +3:55c + 2.12.3 +====1 +1:50a +2:63,64c +3:57,58c + 10.0.5 + 11.0.5 +====1 +1:53c + 3.11.1 +2:67c +3:61c + 3.11.2 +====1 +1:54a +2:69c +3:63c + 1.3 +====1 +1:55a +2:71c +3:65c + 2.7.1 +====1 +1:124,158c + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + junit + junit + ${junit.version} + + + org.assertj + assertj-core + ${assertj.version} + + + org.mockito + mockito-core + ${mockito.version} + + + org.hamcrest + hamcrest-core + 1.3 + + + + +2:139a +3:133a +====1 +1:181a +2:163,202c +3:157,196c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:303c + 2.7.1 +2:324c +3:318c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..3f4f080256 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,145 @@ +====1 +1:7c + 4.1.24-SNAPSHOT +2:7c +3:7c + 4.2.2-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.5 + 11.0.5 +====1 +1:53c + 3.11.1 +2:61c +3:61c + 3.11.2 +====1 +1:54a +2:63c +3:63c + 1.3 +====1 +1:55a +2:65c +3:65c + 2.7.1 +====1 
+1:124,158c + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + junit + junit + ${junit.version} + + + org.assertj + assertj-core + ${assertj.version} + + + org.mockito + mockito-core + ${mockito.version} + + + org.hamcrest + hamcrest-core + 1.3 + + + + +2:133a +3:133a +====1 +1:181a +2:157,196c +3:157,196c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:303c + 2.7.1 +2:318c +3:318c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..3f4f080256 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,145 @@ +====1 +1:7c + 4.1.24-SNAPSHOT +2:7c +3:7c + 4.2.2-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.5 + 11.0.5 +====1 +1:53c + 3.11.1 +2:61c +3:61c + 3.11.2 +====1 +1:54a +2:63c +3:63c + 1.3 +====1 +1:55a +2:65c +3:65c + 2.7.1 +====1 +1:124,158c + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + junit + junit + ${junit.version} + + + org.assertj + assertj-core + ${assertj.version} + + + org.mockito + mockito-core + ${mockito.version} + + + org.hamcrest + hamcrest-core + 1.3 + + + + +2:133a +3:133a +====1 +1:181a +2:157,196c +3:157,196c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + 
-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:303c + 2.7.1 +2:318c +3:318c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..8ba60fa027 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.24-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.2-SNAPSHOT + ||||||| 7ff7af179 + 4.1.24-SNAPSHOT + ======= + 4.1.25-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.2-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..8ba60fa027 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.24-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.2-SNAPSHOT + ||||||| 7ff7af179 + 4.1.24-SNAPSHOT + ======= + 4.1.25-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.2-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..8ba60fa027 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.24-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.2-SNAPSHOT + ||||||| 7ff7af179 + 4.1.24-SNAPSHOT + ======= + 4.1.25-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.2-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..8ba60fa027 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.24-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.2-SNAPSHOT + ||||||| 7ff7af179 + 4.1.24-SNAPSHOT + ======= + 4.1.25-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.2-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..8ba60fa027 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.24-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.2-SNAPSHOT + ||||||| 7ff7af179 + 4.1.24-SNAPSHOT + ======= + 4.1.25-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.2-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/530/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..b0a7a7c0ae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/intellimerge/diff_pom.xml.txt @@ -0,0 +1,152 @@ +==== +1:7c + 4.1.24-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 4.2.2-SNAPSHOT + ||||||| 7ff7af179 + 4.1.24-SNAPSHOT + ======= + 4.1.25-SNAPSHOT + 
>>>>>>> TEMP_RIGHT_BRANCH +3:7c + 4.2.2-SNAPSHOT +====1 +1:20a +2:27c +3:21c + metrics-caffeine3 +====1 +1:28a +2:36,37c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:43c +3:37c + metrics-jersey3 +====1 +1:34a +2:45,46c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10 +2:61c +3:55c + 2.12.3 +====1 +1:50a +2:63,64c +3:57,58c + 10.0.5 + 11.0.5 +====1 +1:53c + 3.11.1 +2:67c +3:61c + 3.11.2 +====1 +1:54a +2:69c +3:63c + 1.3 +====1 +1:55a +2:71c +3:65c + 2.7.1 +====1 +1:124,158c + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + junit + junit + ${junit.version} + + + org.assertj + assertj-core + ${assertj.version} + + + org.mockito + mockito-core + ${mockito.version} + + + org.hamcrest + hamcrest-core + 1.3 + + + + +2:139a +3:133a +====1 +1:181a +2:163,202c +3:157,196c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:303c + 2.7.1 +2:324c +3:318c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/530/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/530/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..3f4f080256 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/530/spork/diff_pom.xml.txt @@ -0,0 +1,145 @@ +====1 +1:7c + 4.1.24-SNAPSHOT +2:7c +3:7c + 4.2.2-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.5 + 11.0.5 +====1 +1:53c + 3.11.1 +2:61c +3:61c + 3.11.2 +====1 +1:54a +2:63c +3:63c + 1.3 +====1 +1:55a +2:65c +3:65c + 2.7.1 +====1 +1:124,158c + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + junit + junit + ${junit.version} + + + org.assertj + assertj-core + ${assertj.version} + + + org.mockito + mockito-core + ${mockito.version} + + + org.hamcrest + hamcrest-core + 1.3 + + + + +2:133a +3:133a +====1 +1:181a +2:157,196c +3:157,196c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + 
-J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:303c + 2.7.1 +2:318c +3:318c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/535/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..86f1a73a24 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,95 @@ +====1 +1:7c + 4.1.22-SNAPSHOT +2:7c +3:7c + 4.2.1-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10.8 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.2 + 11.0.2 +====1 +1:55a +2:64c +3:64c + 2.7.1 +====1 +1:178a +2:188,227c +3:188,227c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:295c + 2.7.1 +2:344c +3:344c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..70f3c2f963 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,102 @@ +==== +1:7c + 4.1.22-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 4.2.1-SNAPSHOT + ||||||| 57de13076 + 4.1.22-SNAPSHOT + ======= + 4.1.23-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 4.2.1-SNAPSHOT +====1 +1:20a +2:27c +3:21c + metrics-caffeine3 +====1 +1:28a +2:36,37c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:43c +3:37c + metrics-jersey3 +====1 +1:34a +2:45,46c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10.8 +2:61c +3:55c + 2.12.3 +====1 +1:50a +2:63,64c +3:57,58c + 10.0.2 + 11.0.2 +====1 +1:55a +2:70c +3:64c + 2.7.1 +====1 +1:178a +2:194,233c +3:188,227c + jdk16 + + 16 + + + + + 
org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:295c + 2.7.1 +2:350c +3:344c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..86f1a73a24 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,95 @@ +====1 +1:7c + 4.1.22-SNAPSHOT +2:7c +3:7c + 4.2.1-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10.8 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.2 + 11.0.2 +====1 +1:55a +2:64c +3:64c + 2.7.1 +====1 +1:178a +2:188,227c +3:188,227c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:295c + 2.7.1 +2:344c +3:344c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..70f3c2f963 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,102 @@ +==== +1:7c + 4.1.22-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 4.2.1-SNAPSHOT + ||||||| 57de13076 + 4.1.22-SNAPSHOT + ======= + 4.1.23-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 4.2.1-SNAPSHOT +====1 +1:20a +2:27c +3:21c + metrics-caffeine3 +====1 
+1:28a +2:36,37c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:43c +3:37c + metrics-jersey3 +====1 +1:34a +2:45,46c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10.8 +2:61c +3:55c + 2.12.3 +====1 +1:50a +2:63,64c +3:57,58c + 10.0.2 + 11.0.2 +====1 +1:55a +2:70c +3:64c + 2.7.1 +====1 +1:178a +2:194,233c +3:188,227c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:295c + 2.7.1 +2:350c +3:344c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..86f1a73a24 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,95 @@ +====1 +1:7c + 4.1.22-SNAPSHOT +2:7c +3:7c + 4.2.1-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10.8 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.2 + 11.0.2 +====1 +1:55a +2:64c +3:64c + 2.7.1 +====1 +1:178a +2:188,227c +3:188,227c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:295c + 2.7.1 +2:344c +3:344c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file 
mode 100644 index 0000000000..86f1a73a24 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,95 @@ +====1 +1:7c + 4.1.22-SNAPSHOT +2:7c +3:7c + 4.2.1-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10.8 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.2 + 11.0.2 +====1 +1:55a +2:64c +3:64c + 2.7.1 +====1 +1:178a +2:188,227c +3:188,227c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:295c + 2.7.1 +2:344c +3:344c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..55ee4243a0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.22-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.1-SNAPSHOT + ||||||| 57de13076 + 4.1.22-SNAPSHOT + ======= + 4.1.23-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.1-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..55ee4243a0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.22-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.1-SNAPSHOT + ||||||| 57de13076 + 4.1.22-SNAPSHOT + ======= + 4.1.23-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.1-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..55ee4243a0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.22-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.1-SNAPSHOT + ||||||| 57de13076 + 4.1.22-SNAPSHOT + ======= + 4.1.23-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.1-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_myers/diff_pom.xml.txt 
b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..55ee4243a0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.22-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.1-SNAPSHOT + ||||||| 57de13076 + 4.1.22-SNAPSHOT + ======= + 4.1.23-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.1-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..55ee4243a0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,13 @@ +==== +1:8c + 4.1.22-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 4.2.1-SNAPSHOT + ||||||| 57de13076 + 4.1.22-SNAPSHOT + ======= + 4.1.23-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 4.2.1-SNAPSHOT diff --git a/src/python/merge_conflict_analysis_diffs/535/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..70f3c2f963 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/intellimerge/diff_pom.xml.txt @@ -0,0 +1,102 @@ +==== +1:7c + 4.1.22-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 4.2.1-SNAPSHOT + ||||||| 57de13076 + 4.1.22-SNAPSHOT + ======= + 4.1.23-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 4.2.1-SNAPSHOT +====1 +1:20a +2:27c +3:21c + metrics-caffeine3 +====1 +1:28a +2:36,37c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:43c +3:37c + metrics-jersey3 +====1 +1:34a +2:45,46c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10.8 +2:61c +3:55c + 2.12.3 +====1 +1:50a +2:63,64c +3:57,58c + 10.0.2 + 11.0.2 +====1 +1:55a +2:70c +3:64c + 2.7.1 +====1 +1:178a +2:194,233c +3:188,227c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:295c + 2.7.1 +2:350c +3:344c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/535/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/535/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..86f1a73a24 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/535/spork/diff_pom.xml.txt @@ -0,0 +1,95 @@ +====1 +1:7c + 4.1.22-SNAPSHOT +2:7c +3:7c + 4.2.1-SNAPSHOT +====1 +1:20a +2:21c +3:21c + metrics-caffeine3 +====1 +1:28a +2:30,31c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:37c +3:37c + 
metrics-jersey3 +====1 +1:34a +2:39,40c +3:39,40c + metrics-jetty10 + metrics-jetty11 +====1 +1:49c + 2.9.10.8 +2:55c +3:55c + 2.12.3 +====1 +1:50a +2:57,58c +3:57,58c + 10.0.2 + 11.0.2 +====1 +1:55a +2:64c +3:64c + 2.7.1 +====1 +1:178a +2:188,227c +3:188,227c + jdk16 + + 16 + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + true + + -Xlint:all + -XDcompilePolicy=simple + -Xplugin:ErrorProne -XepExcludedPaths:.*/target/generated-sources/.* + -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED + -J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED + -J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED + + + + com.google.errorprone + error_prone_core + ${errorprone.version} + + + + + + + + +====1 +1:295c + 2.7.1 +2:344c +3:344c + ${errorprone.version} diff --git a/src/python/merge_conflict_analysis_diffs/548/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..34201df19f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,43 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,57c + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====3 +1:53c +2:60c + 3.7.7 +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..76c4f0a55c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,51 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,65c + <<<<<<< HEAD + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 + ||||||| 229c8236f + 2.9.10.8 + 9.4.36.v20210114 + ======= + 2.9.10.8 + 9.4.37.v20210219 + >>>>>>> TEMP_RIGHT_BRANCH +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====1 +1:53c + 3.7.7 +2:68c +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..34201df19f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,43 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets 
+====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,57c + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====3 +1:53c +2:60c + 3.7.7 +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..76c4f0a55c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,51 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,65c + <<<<<<< HEAD + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 + ||||||| 229c8236f + 2.9.10.8 + 9.4.36.v20210114 + ======= + 2.9.10.8 + 9.4.37.v20210219 + >>>>>>> TEMP_RIGHT_BRANCH +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====1 +1:53c + 3.7.7 +2:68c +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..34201df19f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,43 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,57c + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====3 +1:53c +2:60c + 3.7.7 +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..34201df19f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,43 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,57c + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====3 +1:53c +2:60c + 3.7.7 +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..76c4f0a55c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,51 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + 
metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,65c + <<<<<<< HEAD + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 + ||||||| 229c8236f + 2.9.10.8 + 9.4.36.v20210114 + ======= + 2.9.10.8 + 9.4.37.v20210219 + >>>>>>> TEMP_RIGHT_BRANCH +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====1 +1:53c + 3.7.7 +2:68c +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..76c4f0a55c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,51 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,65c + <<<<<<< HEAD + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 + ||||||| 229c8236f + 2.9.10.8 + 9.4.36.v20210114 + ======= + 2.9.10.8 + 9.4.37.v20210219 + >>>>>>> TEMP_RIGHT_BRANCH +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====1 +1:53c + 3.7.7 +2:68c +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..76c4f0a55c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,51 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,65c + <<<<<<< HEAD + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 + ||||||| 229c8236f + 2.9.10.8 + 9.4.36.v20210114 + ======= + 2.9.10.8 + 9.4.37.v20210219 + >>>>>>> TEMP_RIGHT_BRANCH +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====1 +1:53c + 3.7.7 +2:68c +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..76c4f0a55c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,51 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,65c + <<<<<<< HEAD + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 + ||||||| 229c8236f + 2.9.10.8 + 9.4.36.v20210114 + ======= + 2.9.10.8 + 9.4.37.v20210219 + >>>>>>> TEMP_RIGHT_BRANCH +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====1 +1:53c + 3.7.7 +2:68c +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..76c4f0a55c --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/548/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,51 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,65c + <<<<<<< HEAD + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 + ||||||| 229c8236f + 2.9.10.8 + 9.4.36.v20210114 + ======= + 2.9.10.8 + 9.4.37.v20210219 + >>>>>>> TEMP_RIGHT_BRANCH +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====1 +1:53c + 3.7.7 +2:68c +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..76c4f0a55c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/intellimerge/diff_pom.xml.txt @@ -0,0 +1,51 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,65c + <<<<<<< HEAD + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 + ||||||| 229c8236f + 2.9.10.8 + 9.4.36.v20210114 + ======= + 2.9.10.8 + 9.4.37.v20210219 + >>>>>>> TEMP_RIGHT_BRANCH +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====1 +1:53c + 3.7.7 +2:68c +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/548/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/548/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..34201df19f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/548/spork/diff_pom.xml.txt @@ -0,0 +1,43 @@ +====1 +1:7c + 4.1.18-SNAPSHOT +2:7c +3:7c + 4.2.0-SNAPSHOT +====1 +1:28a +2:29,30c +3:29,30c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:33a +2:36c +3:36c + metrics-jersey3 +====1 +1:34a +2:38,39c +3:38,39c + metrics-jetty10 + metrics-jetty11 +==== +1:49,50c + 2.9.10.8 + 9.4.36.v20210114 +2:54,57c + 2.12.1 + 9.4.36.v20210114 + 10.0.0 + 11.0.0 +3:54,57c + 2.12.1 + 9.4.37.v20210219 + 10.0.0 + 11.0.0 +====3 +1:53c +2:60c + 3.7.7 +3:60c + 3.8.0 diff --git a/src/python/merge_conflict_analysis_diffs/582/git_hires_merge/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/git_hires_merge/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/582/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..63000511b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,212 @@ +====1 +1:5c + io.dropwizard.metrics +2:5c +3:5c + io.dropwizard.metrics5 +====1 +1:7c + 4.1.17-SNAPSHOT +2:7c +3:7c + 5.0.0-rc5-SNAPSHOT +====1 +1:9c + Metrics Parent +2:9c +3:9c + Metrics5 Parent +====1 +1:22a +2:23c +3:23c + metrics-healthchecks +====1 +1:25c + metrics-healthchecks +2:26c +3:26c + metrics-influxdb +====3 +1:28a +2:29a +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:30c + metrics-jcstress +2:30a +3:32a +====1 +1:38c + metrics-jmx +2:37a +3:39a +====1 +1:44a +2:44,47c +3:46,49c + metrics-jcstress + metrics-jmx + 
metrics-legacy-adapter + metrics-legacy-adapter-healthchecks +==== +1:52,53c + 2.9.10.7 + 9.4.35.v20201120 +2:55c + 9.4.34.v20201102 +3:57,58c + 2.12.1 + 9.4.35.v20201120 +==== +1:58c + 3.6.28 +2:60c + 3.6.0 +3:63c + 3.7.0 +====3 +1:118c +2:120c + sonatype-nexus-snapshots +3:123c + ossrh +====3 +1:123c +2:125c + sonatype-nexus-staging +3:128c + ossrh +====3 +1:190a +2:192a +3:196,198c + + EDA86E9FB607B5FC9223FB767D4868B53E31E7AD + +====3 +1:194a +2:196a +3:203,234c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +====3 +1:199c +2:201c + --no-tty +3:239,240c + --pinentry-mode + loopback +====3 +1:211a +2:213a +3:253,271c + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + ossrh + https://oss.sonatype.org/ + true + + + + nexus-deploy + deploy + + deploy + + + + +====3 +1:215c +2:217c + +3:274a +====1 +1:219,227c + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + +2:220a +3:277a +====3 +1:283,295c +2:276,288c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins +3:332a +====3 +1:319,337c +2:312,330c + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +3:355a diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..6cb2c8aeb1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort/diff_MetricRegistry.java.txt @@ -0,0 +1,20 @@ +283d282 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +286,294d284 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< @SuppressWarnings("rawtypes") +< public Gauge gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< ======= +< @SuppressWarnings("rawtypes") +< public T gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +296d285 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +298,302d286 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< public Gauge newMetric() { +< ======= +< public T newMetric() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..86f4628a30 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,234 @@ +====1 +1:5c + io.dropwizard.metrics +2:5c +3:5c + io.dropwizard.metrics5 +==== +1:7c + 4.1.17-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 5.0.0-rc5-SNAPSHOT + ||||||| 63d5c8f68 + 4.1.17-SNAPSHOT + ======= + 4.2.0-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 5.0.0-rc5-SNAPSHOT +====1 +1:9c + Metrics Parent +2:15c +3:9c + Metrics5 Parent +====1 +1:22a +2:29c +3:23c + metrics-healthchecks +====1 +1:25c + metrics-healthchecks +2:32c +3:26c + metrics-influxdb +====1 +1:28a +2:36,37c +3:30,31c + 
metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:30c + metrics-jcstress +2:38a +3:32a +====1 +1:38c + metrics-jmx +2:45a +3:39a +====1 +1:44a +2:52,55c +3:46,49c + metrics-jcstress + metrics-jmx + metrics-legacy-adapter + metrics-legacy-adapter-healthchecks +==== +1:52c + 2.9.10.7 +2:63,69c + <<<<<<< HEAD + 9.4.34.v20201102 + ||||||| 63d5c8f68 + 2.9.10.7 + 9.4.35.v20201120 + ======= + 2.12.1 +3:57c + 2.12.1 +====2 +1:53a +3:58a +2:71c + >>>>>>> TEMP_RIGHT_BRANCH +==== +1:58c + 3.6.28 +2:76,82c + <<<<<<< HEAD + 3.6.0 + ||||||| 63d5c8f68 + 3.6.28 + ======= + 3.7.0 + >>>>>>> TEMP_RIGHT_BRANCH +3:63c + 3.7.0 +====1 +1:118c + sonatype-nexus-snapshots +2:142c +3:123c + ossrh +====1 +1:123c + sonatype-nexus-staging +2:147c +3:128c + ossrh +====1 +1:190a +2:215,217c +3:196,198c + + EDA86E9FB607B5FC9223FB767D4868B53E31E7AD + +====1 +1:194a +2:222,253c +3:203,234c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +====1 +1:199c + --no-tty +2:258,259c +3:239,240c + --pinentry-mode + loopback +====1 +1:211a +2:272,290c +3:253,271c + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + ossrh + https://oss.sonatype.org/ + true + + + + nexus-deploy + deploy + + deploy + + + + +====1 +1:215c + +2:293a +3:274a +====1 +1:219,227c + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + +2:296a +3:277a +====1 +1:283,295c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins +2:351a +3:332a +====1 +1:319,337c + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +2:374a +3:355a diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_adjacent/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_adjacent/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..63000511b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,212 @@ +====1 +1:5c + io.dropwizard.metrics +2:5c +3:5c + io.dropwizard.metrics5 +====1 +1:7c + 4.1.17-SNAPSHOT +2:7c +3:7c + 5.0.0-rc5-SNAPSHOT +====1 +1:9c + Metrics Parent +2:9c +3:9c + Metrics5 Parent +====1 +1:22a +2:23c +3:23c + metrics-healthchecks +====1 +1:25c + metrics-healthchecks +2:26c +3:26c + metrics-influxdb +====3 +1:28a +2:29a +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:30c + metrics-jcstress +2:30a +3:32a +====1 +1:38c + metrics-jmx +2:37a +3:39a +====1 +1:44a +2:44,47c +3:46,49c + metrics-jcstress + metrics-jmx + metrics-legacy-adapter + metrics-legacy-adapter-healthchecks +==== +1:52,53c + 2.9.10.7 + 9.4.35.v20201120 +2:55c + 9.4.34.v20201102 +3:57,58c + 2.12.1 + 9.4.35.v20201120 +==== +1:58c + 3.6.28 +2:60c + 3.6.0 +3:63c + 3.7.0 +====3 +1:118c +2:120c + sonatype-nexus-snapshots +3:123c + ossrh +====3 +1:123c +2:125c + sonatype-nexus-staging +3:128c + ossrh +====3 +1:190a +2:192a +3:196,198c + + EDA86E9FB607B5FC9223FB767D4868B53E31E7AD + +====3 +1:194a +2:196a +3:203,234c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + 
maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +====3 +1:199c +2:201c + --no-tty +3:239,240c + --pinentry-mode + loopback +====3 +1:211a +2:213a +3:253,271c + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + ossrh + https://oss.sonatype.org/ + true + + + + nexus-deploy + deploy + + deploy + + + + +====3 +1:215c +2:217c + +3:274a +====1 +1:219,227c + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + +2:220a +3:277a +====3 +1:283,295c +2:276,288c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins +3:332a +====3 +1:319,337c +2:312,330c + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +3:355a diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_ignorespace/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_ignorespace/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..6cb2c8aeb1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_ignorespace/diff_MetricRegistry.java.txt @@ -0,0 +1,20 @@ +283d282 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +286,294d284 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< @SuppressWarnings("rawtypes") +< public Gauge gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< ======= +< @SuppressWarnings("rawtypes") +< public T gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +296d285 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +298,302d286 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< public Gauge newMetric() { +< ======= +< public T newMetric() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..86f4628a30 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,234 @@ +====1 +1:5c + io.dropwizard.metrics +2:5c +3:5c + io.dropwizard.metrics5 +==== +1:7c + 4.1.17-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 5.0.0-rc5-SNAPSHOT + ||||||| 63d5c8f68 + 4.1.17-SNAPSHOT + ======= + 4.2.0-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 5.0.0-rc5-SNAPSHOT +====1 +1:9c + Metrics Parent +2:15c +3:9c + Metrics5 Parent +====1 +1:22a +2:29c +3:23c + metrics-healthchecks +====1 +1:25c + metrics-healthchecks +2:32c +3:26c + metrics-influxdb +====1 +1:28a +2:36,37c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:30c + metrics-jcstress +2:38a +3:32a +====1 +1:38c + metrics-jmx +2:45a +3:39a +====1 +1:44a +2:52,55c +3:46,49c + metrics-jcstress + metrics-jmx + metrics-legacy-adapter + metrics-legacy-adapter-healthchecks +==== +1:52c + 2.9.10.7 +2:63,69c + <<<<<<< HEAD + 9.4.34.v20201102 + ||||||| 63d5c8f68 + 2.9.10.7 + 9.4.35.v20201120 + ======= + 2.12.1 +3:57c + 2.12.1 +====2 +1:53a +3:58a +2:71c + >>>>>>> TEMP_RIGHT_BRANCH +==== +1:58c + 3.6.28 +2:76,82c + 
<<<<<<< HEAD + 3.6.0 + ||||||| 63d5c8f68 + 3.6.28 + ======= + 3.7.0 + >>>>>>> TEMP_RIGHT_BRANCH +3:63c + 3.7.0 +====1 +1:118c + sonatype-nexus-snapshots +2:142c +3:123c + ossrh +====1 +1:123c + sonatype-nexus-staging +2:147c +3:128c + ossrh +====1 +1:190a +2:215,217c +3:196,198c + + EDA86E9FB607B5FC9223FB767D4868B53E31E7AD + +====1 +1:194a +2:222,253c +3:203,234c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +====1 +1:199c + --no-tty +2:258,259c +3:239,240c + --pinentry-mode + loopback +====1 +1:211a +2:272,290c +3:253,271c + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + ossrh + https://oss.sonatype.org/ + true + + + + nexus-deploy + deploy + + deploy + + + + +====1 +1:215c + +2:293a +3:274a +====1 +1:219,227c + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + +2:296a +3:277a +====1 +1:283,295c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins +2:351a +3:332a +====1 +1:319,337c + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +2:374a +3:355a diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..63000511b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,212 @@ +====1 +1:5c + io.dropwizard.metrics +2:5c +3:5c + io.dropwizard.metrics5 +====1 +1:7c + 4.1.17-SNAPSHOT +2:7c +3:7c + 5.0.0-rc5-SNAPSHOT +====1 +1:9c + Metrics Parent +2:9c +3:9c + Metrics5 Parent +====1 +1:22a +2:23c +3:23c + metrics-healthchecks +====1 +1:25c + metrics-healthchecks +2:26c +3:26c + metrics-influxdb +====3 +1:28a +2:29a +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:30c + metrics-jcstress +2:30a +3:32a +====1 +1:38c + metrics-jmx +2:37a +3:39a +====1 +1:44a +2:44,47c +3:46,49c + metrics-jcstress + metrics-jmx + metrics-legacy-adapter + metrics-legacy-adapter-healthchecks +==== +1:52,53c + 2.9.10.7 + 9.4.35.v20201120 +2:55c + 9.4.34.v20201102 +3:57,58c + 2.12.1 + 9.4.35.v20201120 +==== +1:58c + 3.6.28 +2:60c + 3.6.0 +3:63c + 3.7.0 +====3 +1:118c +2:120c + sonatype-nexus-snapshots +3:123c + ossrh +====3 +1:123c +2:125c + sonatype-nexus-staging +3:128c + ossrh +====3 +1:190a +2:192a +3:196,198c + + EDA86E9FB607B5FC9223FB767D4868B53E31E7AD + +====3 +1:194a +2:196a +3:203,234c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +====3 +1:199c +2:201c + --no-tty +3:239,240c + --pinentry-mode + loopback +====3 +1:211a +2:213a +3:253,271c + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + ossrh + https://oss.sonatype.org/ + true + + + + nexus-deploy + deploy + + deploy + + + + +====3 +1:215c +2:217c + +3:274a +====1 +1:219,227c + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + +2:220a 
+3:277a +====3 +1:283,295c +2:276,288c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins +3:332a +====3 +1:319,337c +2:312,330c + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +3:355a diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports_ignorespace/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports_ignorespace/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..63000511b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,212 @@ +====1 +1:5c + io.dropwizard.metrics +2:5c +3:5c + io.dropwizard.metrics5 +====1 +1:7c + 4.1.17-SNAPSHOT +2:7c +3:7c + 5.0.0-rc5-SNAPSHOT +====1 +1:9c + Metrics Parent +2:9c +3:9c + Metrics5 Parent +====1 +1:22a +2:23c +3:23c + metrics-healthchecks +====1 +1:25c + metrics-healthchecks +2:26c +3:26c + metrics-influxdb +====3 +1:28a +2:29a +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:30c + metrics-jcstress +2:30a +3:32a +====1 +1:38c + metrics-jmx +2:37a +3:39a +====1 +1:44a +2:44,47c +3:46,49c + metrics-jcstress + metrics-jmx + metrics-legacy-adapter + metrics-legacy-adapter-healthchecks +==== +1:52,53c + 2.9.10.7 + 9.4.35.v20201120 +2:55c + 9.4.34.v20201102 +3:57,58c + 2.12.1 + 9.4.35.v20201120 +==== +1:58c + 3.6.28 +2:60c + 3.6.0 +3:63c + 3.7.0 +====3 +1:118c +2:120c + sonatype-nexus-snapshots +3:123c + ossrh +====3 +1:123c +2:125c + sonatype-nexus-staging +3:128c + ossrh +====3 +1:190a +2:192a +3:196,198c + + EDA86E9FB607B5FC9223FB767D4868B53E31E7AD + +====3 +1:194a +2:196a +3:203,234c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +====3 +1:199c +2:201c + --no-tty +3:239,240c + --pinentry-mode + loopback +====3 +1:211a +2:213a +3:253,271c + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + ossrh + https://oss.sonatype.org/ + true + + + + nexus-deploy + deploy + + deploy + + + + +====3 +1:215c +2:217c + +3:274a +====1 +1:219,227c + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + +2:220a +3:277a +====3 +1:283,295c +2:276,288c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins +3:332a +====3 +1:319,337c +2:312,330c + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +3:355a diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_histogram/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_histogram/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..6cb2c8aeb1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_histogram/diff_MetricRegistry.java.txt @@ -0,0 +1,20 @@ +283d282 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +286,294d284 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< 
@SuppressWarnings("rawtypes") +< public Gauge gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< ======= +< @SuppressWarnings("rawtypes") +< public T gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +296d285 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +298,302d286 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< public Gauge newMetric() { +< ======= +< public T newMetric() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..dab07494ee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,25 @@ +====1 +1:6c + io.dropwizard.metrics +2:6c +3:6c + io.dropwizard.metrics5 +==== +1:8c + 4.1.17-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 5.0.0-rc5-SNAPSHOT + ||||||| 63d5c8f68 + 4.1.17-SNAPSHOT + ======= + 4.2.0-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 5.0.0-rc5-SNAPSHOT +====1 +1:12c + Metrics Documentation +2:18c +3:12c + Metrics5 Documentation diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_ignorespace/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_ignorespace/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..6cb2c8aeb1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_ignorespace/diff_MetricRegistry.java.txt @@ -0,0 +1,20 @@ +283d282 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +286,294d284 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< @SuppressWarnings("rawtypes") +< public Gauge gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< ======= +< @SuppressWarnings("rawtypes") +< public T gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +296d285 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +298,302d286 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< public Gauge newMetric() { +< ======= +< public T newMetric() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..dab07494ee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,25 @@ +====1 +1:6c + io.dropwizard.metrics +2:6c +3:6c + io.dropwizard.metrics5 +==== +1:8c + 4.1.17-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 5.0.0-rc5-SNAPSHOT + ||||||| 63d5c8f68 + 4.1.17-SNAPSHOT + ======= + 4.2.0-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 5.0.0-rc5-SNAPSHOT +====1 +1:12c + Metrics Documentation +2:18c 
+3:12c + Metrics5 Documentation diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_minimal/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_minimal/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..6cb2c8aeb1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_minimal/diff_MetricRegistry.java.txt @@ -0,0 +1,20 @@ +283d282 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +286,294d284 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< @SuppressWarnings("rawtypes") +< public Gauge gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< ======= +< @SuppressWarnings("rawtypes") +< public T gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +296d285 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +298,302d286 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< public Gauge newMetric() { +< ======= +< public T newMetric() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..dab07494ee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,25 @@ +====1 +1:6c + io.dropwizard.metrics +2:6c +3:6c + io.dropwizard.metrics5 +==== +1:8c + 4.1.17-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 5.0.0-rc5-SNAPSHOT + ||||||| 63d5c8f68 + 4.1.17-SNAPSHOT + ======= + 4.2.0-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 5.0.0-rc5-SNAPSHOT +====1 +1:12c + Metrics Documentation +2:18c +3:12c + Metrics5 Documentation diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_myers/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_myers/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..6cb2c8aeb1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_myers/diff_MetricRegistry.java.txt @@ -0,0 +1,20 @@ +283d282 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +286,294d284 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< @SuppressWarnings("rawtypes") +< public Gauge gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< ======= +< @SuppressWarnings("rawtypes") +< public T gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +296d285 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +298,302d286 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< public Gauge newMetric() { +< ======= +< public T newMetric() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java diff --git 
a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..dab07494ee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,25 @@ +====1 +1:6c + io.dropwizard.metrics +2:6c +3:6c + io.dropwizard.metrics5 +==== +1:8c + 4.1.17-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 5.0.0-rc5-SNAPSHOT + ||||||| 63d5c8f68 + 4.1.17-SNAPSHOT + ======= + 4.2.0-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 5.0.0-rc5-SNAPSHOT +====1 +1:12c + Metrics Documentation +2:18c +3:12c + Metrics5 Documentation diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_patience/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_patience/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..6cb2c8aeb1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_patience/diff_MetricRegistry.java.txt @@ -0,0 +1,20 @@ +283d282 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +286,294d284 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< @SuppressWarnings("rawtypes") +< public Gauge gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< ======= +< @SuppressWarnings("rawtypes") +< public T gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +296d285 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +298,302d286 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< public Gauge newMetric() { +< ======= +< public T newMetric() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java diff --git a/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..dab07494ee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,25 @@ +====1 +1:6c + io.dropwizard.metrics +2:6c +3:6c + io.dropwizard.metrics5 +==== +1:8c + 4.1.17-SNAPSHOT +2:8,14c + <<<<<<< HEAD + 5.0.0-rc5-SNAPSHOT + ||||||| 63d5c8f68 + 4.1.17-SNAPSHOT + ======= + 4.2.0-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:8c + 5.0.0-rc5-SNAPSHOT +====1 +1:12c + Metrics Documentation +2:18c +3:12c + Metrics5 Documentation diff --git a/src/python/merge_conflict_analysis_diffs/582/intellimerge/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/intellimerge/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..6cb2c8aeb1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/intellimerge/diff_MetricRegistry.java.txt @@ -0,0 +1,20 @@ +283d282 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +286,294d284 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< @SuppressWarnings("rawtypes") +< public Gauge gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< ======= +< @SuppressWarnings("rawtypes") +< public T 
gauge(String name, final MetricSupplier supplier) { +< return getOrAdd(name, new MetricBuilder() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +296d285 +< <<<<<<< HEAD:metrics-core/src/main/java/io/dropwizard/metrics5/MetricRegistry.java +298,302d286 +< ||||||| 63d5c8f68:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java +< public Gauge newMetric() { +< ======= +< public T newMetric() { +< >>>>>>> TEMP_RIGHT_BRANCH:metrics-core/src/main/java/com/codahale/metrics/MetricRegistry.java diff --git a/src/python/merge_conflict_analysis_diffs/582/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..86f4628a30 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/intellimerge/diff_pom.xml.txt @@ -0,0 +1,234 @@ +====1 +1:5c + io.dropwizard.metrics +2:5c +3:5c + io.dropwizard.metrics5 +==== +1:7c + 4.1.17-SNAPSHOT +2:7,13c + <<<<<<< HEAD + 5.0.0-rc5-SNAPSHOT + ||||||| 63d5c8f68 + 4.1.17-SNAPSHOT + ======= + 4.2.0-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 5.0.0-rc5-SNAPSHOT +====1 +1:9c + Metrics Parent +2:15c +3:9c + Metrics5 Parent +====1 +1:22a +2:29c +3:23c + metrics-healthchecks +====1 +1:25c + metrics-healthchecks +2:32c +3:26c + metrics-influxdb +====1 +1:28a +2:36,37c +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:30c + metrics-jcstress +2:38a +3:32a +====1 +1:38c + metrics-jmx +2:45a +3:39a +====1 +1:44a +2:52,55c +3:46,49c + metrics-jcstress + metrics-jmx + metrics-legacy-adapter + metrics-legacy-adapter-healthchecks +==== +1:52c + 2.9.10.7 +2:63,69c + <<<<<<< HEAD + 9.4.34.v20201102 + ||||||| 63d5c8f68 + 2.9.10.7 + 9.4.35.v20201120 + ======= + 2.12.1 +3:57c + 2.12.1 +====2 +1:53a +3:58a +2:71c + >>>>>>> TEMP_RIGHT_BRANCH +==== +1:58c + 3.6.28 +2:76,82c + <<<<<<< HEAD + 3.6.0 + ||||||| 63d5c8f68 + 3.6.28 + ======= + 3.7.0 + >>>>>>> TEMP_RIGHT_BRANCH +3:63c + 3.7.0 +====1 +1:118c + sonatype-nexus-snapshots +2:142c +3:123c + ossrh +====1 +1:123c + sonatype-nexus-staging +2:147c +3:128c + ossrh +====1 +1:190a +2:215,217c +3:196,198c + + EDA86E9FB607B5FC9223FB767D4868B53E31E7AD + +====1 +1:194a +2:222,253c +3:203,234c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +====1 +1:199c + --no-tty +2:258,259c +3:239,240c + --pinentry-mode + loopback +====1 +1:211a +2:272,290c +3:253,271c + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + ossrh + https://oss.sonatype.org/ + true + + + + nexus-deploy + deploy + + deploy + + + + +====1 +1:215c + +2:293a +3:274a +====1 +1:219,227c + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + +2:296a +3:277a +====1 +1:283,295c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins +2:351a +3:332a +====1 +1:319,337c + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +2:374a +3:355a diff --git a/src/python/merge_conflict_analysis_diffs/582/spork/diff_Graphite.java.txt b/src/python/merge_conflict_analysis_diffs/582/spork/diff_Graphite.java.txt new file mode 100644 index 0000000000..3be4905939 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/spork/diff_Graphite.java.txt @@ -0,0 +1,33 @@ +12d11 +< import java.net.InetAddress; +17a17 +> 
import static java.util.Objects.requireNonNull; +68a69,76 +> if (hostname == null || hostname.isEmpty()) { +> throw new IllegalArgumentException("hostname must not be null or empty"); +> } +> +> if (port < 0 || port > 65535) { +> throw new IllegalArgumentException("port must be a valid IP port (0-65535)"); +> } +> +72,73c80,81 +< this.socketFactory = socketFactory; +< this.charset = charset; +--- +> this.socketFactory = requireNonNull(socketFactory, "socketFactory must not be null"); +> this.charset = requireNonNull(charset, "charset must not be null"); +107,109c115,117 +< this.address = address; +< this.socketFactory = socketFactory; +< this.charset = charset; +--- +> this.address = requireNonNull(address, "address must not be null"); +> this.socketFactory = requireNonNull(socketFactory, "socketFactory must not be null"); +> this.charset = requireNonNull(charset, "charset must not be null"); +122,123c130,131 +< if (address == null || hostname != null) { +< address = new InetSocketAddress(InetAddress.getByName(hostname), port); +--- +> if (address == null || address.getHostName() == null && hostname != null) { +> address = new InetSocketAddress(hostname, port); diff --git a/src/python/merge_conflict_analysis_diffs/582/spork/diff_GraphiteTest.java.txt b/src/python/merge_conflict_analysis_diffs/582/spork/diff_GraphiteTest.java.txt new file mode 100644 index 0000000000..ffb8d45ec9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/spork/diff_GraphiteTest.java.txt @@ -0,0 +1,117 @@ +7d6 +< +8a8 +> import java.io.IOException; +16c16,17 +< import static org.assertj.core.api.Fail.failBecauseExceptionWasNotThrown; +--- +> import static org.assertj.core.api.Assertions.assertThatNoException; +> import static org.assertj.core.api.Assertions.assertThatThrownBy; +35,36d35 +< private Graphite graphite; +< +66,68c65,67 +< graphite = new Graphite(address, socketFactory); +< graphite.connect(); +< +--- +> try (Graphite graphite = new Graphite(address, socketFactory)) { +> graphite.connect(); +> } +74,76c73,75 +< graphite = new Graphite(host, port, socketFactory); +< graphite.connect(); +< +--- +> try (Graphite graphite = new Graphite(host, port, socketFactory)) { +> graphite.connect(); +> } +81,84c80,83 +< public void measuresFailures() { +< graphite = new Graphite(address, socketFactory); +< assertThat(graphite.getFailures()) +< .isZero(); +--- +> public void measuresFailures() throws IOException { +> try (Graphite graphite = new Graphite(address, socketFactory)) { +> assertThat(graphite.getFailures()).isZero(); +> } +89,91c88,90 +< graphite = new Graphite(address, socketFactory); +< graphite.connect(); +< graphite.close(); +--- +> try (Graphite graphite = new Graphite(address, socketFactory)) { +> graphite.connect(); +> } +98,105c97,101 +< graphite = new Graphite(address, socketFactory); +< graphite.connect(); +< try { +< graphite.connect(); +< failBecauseExceptionWasNotThrown(IllegalStateException.class); +< } catch (IllegalStateException e) { +< assertThat(e.getMessage()) +< .isEqualTo("Already connected"); +--- +> try (Graphite graphite = new Graphite(address, socketFactory)) { +> assertThatNoException().isThrownBy(graphite::connect); +> assertThatThrownBy(graphite::connect) +> .isInstanceOf(IllegalStateException.class) +> .hasMessage("Already connected"); +111,117c107,111 +< graphite = new Graphite(address, socketFactory); +< graphite.connect(); +< graphite.send("name", "value", 100); +< graphite.close(); +< +< assertThat(output.toString()) +< .isEqualTo("name value 100\n"); +--- +> try 
(Graphite graphite = new Graphite(address, socketFactory)) { +> graphite.connect(); +> graphite.send("name", "value", 100); +> } +> assertThat(output).hasToString("name value 100\n"); +122,128c116,120 +< graphite = new Graphite(address, socketFactory); +< graphite.connect(); +< graphite.send("name woo", "value", 100); +< graphite.close(); +< +< assertThat(output.toString()) +< .isEqualTo("name-woo value 100\n"); +--- +> try (Graphite graphite = new Graphite(address, socketFactory)) { +> graphite.connect(); +> graphite.send("name woo", "value", 100); +> } +> assertThat(output).hasToString("name-woo value 100\n"); +133,139c125,129 +< graphite = new Graphite(address, socketFactory); +< graphite.connect(); +< graphite.send("name", "value woo", 100); +< graphite.close(); +< +< assertThat(output.toString()) +< .isEqualTo("name value-woo 100\n"); +--- +> try (Graphite graphite = new Graphite(address, socketFactory)) { +> graphite.connect(); +> graphite.send("name", "value woo", 100); +> } +> assertThat(output).hasToString("name value-woo 100\n"); +143c133 +< public void notifiesIfGraphiteIsUnavailable() { +--- +> public void notifiesIfGraphiteIsUnavailable() throws IOException { +148,152c138,140 +< unavailableGraphite.connect(); +< failBecauseExceptionWasNotThrown(UnknownHostException.class); +< } catch (Exception e) { +< assertThat(e.getMessage()) +< .isEqualTo(unavailableHost); +--- +> assertThatThrownBy(unavailableGraphite::connect) +> .isInstanceOf(UnknownHostException.class) +> .hasMessage(unavailableHost); diff --git a/src/python/merge_conflict_analysis_diffs/582/spork/diff_MetricRegistry.java.txt b/src/python/merge_conflict_analysis_diffs/582/spork/diff_MetricRegistry.java.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/python/merge_conflict_analysis_diffs/582/spork/diff_Timer.java.txt b/src/python/merge_conflict_analysis_diffs/582/spork/diff_Timer.java.txt new file mode 100644 index 0000000000..400527979f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/spork/diff_Timer.java.txt @@ -0,0 +1,4 @@ +23c23 +< private Context(Timer timer, Clock clock) { +--- +> Context(Timer timer, Clock clock) { diff --git a/src/python/merge_conflict_analysis_diffs/582/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/582/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..63000511b5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/582/spork/diff_pom.xml.txt @@ -0,0 +1,212 @@ +====1 +1:5c + io.dropwizard.metrics +2:5c +3:5c + io.dropwizard.metrics5 +====1 +1:7c + 4.1.17-SNAPSHOT +2:7c +3:7c + 5.0.0-rc5-SNAPSHOT +====1 +1:9c + Metrics Parent +2:9c +3:9c + Metrics5 Parent +====1 +1:22a +2:23c +3:23c + metrics-healthchecks +====1 +1:25c + metrics-healthchecks +2:26c +3:26c + metrics-influxdb +====3 +1:28a +2:29a +3:30,31c + metrics-jakarta-servlet + metrics-jakarta-servlets +====1 +1:30c + metrics-jcstress +2:30a +3:32a +====1 +1:38c + metrics-jmx +2:37a +3:39a +====1 +1:44a +2:44,47c +3:46,49c + metrics-jcstress + metrics-jmx + metrics-legacy-adapter + metrics-legacy-adapter-healthchecks +==== +1:52,53c + 2.9.10.7 + 9.4.35.v20201120 +2:55c + 9.4.34.v20201102 +3:57,58c + 2.12.1 + 9.4.35.v20201120 +==== +1:58c + 3.6.28 +2:60c + 3.6.0 +3:63c + 3.7.0 +====3 +1:118c +2:120c + sonatype-nexus-snapshots +3:123c + ossrh +====3 +1:123c +2:125c + sonatype-nexus-staging +3:128c + ossrh +====3 +1:190a +2:192a +3:196,198c + + EDA86E9FB607B5FC9223FB767D4868B53E31E7AD + +====3 +1:194a +2:196a +3:203,234c + maven-source-plugin + 3.2.1 + + + 
attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +====3 +1:199c +2:201c + --no-tty +3:239,240c + --pinentry-mode + loopback +====3 +1:211a +2:213a +3:253,271c + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + ossrh + https://oss.sonatype.org/ + true + + + + nexus-deploy + deploy + + deploy + + + + +====3 +1:215c +2:217c + +3:274a +====1 +1:219,227c + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + +2:220a +3:277a +====3 +1:283,295c +2:276,288c + maven-source-plugin + 3.2.1 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins +3:332a +====3 +1:319,337c +2:312,330c + maven-javadoc-plugin + 3.2.0 + + 8 + none + true + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins +3:355a diff --git a/src/python/merge_conflict_analysis_diffs/65/git_hires_merge/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/git_hires_merge/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..f2d16f1cdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/git_hires_merge/diff_HttpClientManager.java.txt @@ -0,0 +1,101 @@ +====1 +1:18a +2:19,22c +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:23a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====3 +1:25a +2:29a +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:50,56c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:62,71c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +3:67c + shutdown(); +==== +1:71a +2:85,92c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + +3:81,107c + public static NacosRestTemplate 
getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/git_hires_merge/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/git_hires_merge/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..4362955cdf --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/git_hires_merge/diff_JacksonUtils.java.txt @@ -0,0 +1,175 @@ +====3 +1:18a +2:18a +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====3 +1:19a +2:19a +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====3 +1:20a +2:20a +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:22c + import java.io.InputStream; +3:31,32c + import java.io.InputStream; + import java.io.IOException +====3 +1:32a +2:33a +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:36,54c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + 
return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +====3 +1:53c +2:58c + } +3:86c + } +====3 +1:55,57c +2:60,62c + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +3:88,142c + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..98a510768a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort/diff_HttpClientManager.java.txt @@ -0,0 +1,147 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; + ||||||| d4efcf4df + ======= + import com.alibaba.nacos.common.utils.ExceptionUtil; + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:28a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====1 +1:25a +2:35,36c +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:57,68c + + <<<<<<< HEAD + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new 
ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + ||||||| d4efcf4df + ======= + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + >>>>>>> TEMP_RIGHT_BRANCH +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:74,96c + <<<<<<< HEAD + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ||||||| d4efcf4df + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ======= + shutdown(); + >>>>>>> TEMP_RIGHT_BRANCH +3:67c + shutdown(); +==== +1:71a +2:110,136c + <<<<<<< HEAD + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + ||||||| d4efcf4df + ======= + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + >>>>>>> TEMP_RIGHT_BRANCH + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..0e754a4ff8 
--- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort/diff_JacksonUtils.java.txt @@ -0,0 +1,257 @@ +====1 +1:18a +2:19,23c +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====1 +1:19a +2:25c +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====1 +1:20a +2:27,29c +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:31,36c + <<<<<<< HEAD + import java.io.InputStream; + ||||||| d4efcf4df + ======= + import java.io.IOException; + >>>>>>> TEMP_RIGHT_BRANCH +3:31,32c + import java.io.InputStream; + import java.io.IOException +====1 +1:32a +2:48c +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:51,86c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + <<<<<<< HEAD + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +==== 
+1:53,57c + } + + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +2:90,151c + } + ||||||| d4efcf4df + public static T toObj(String json, Class cls) throws Exception { + return mapper.readValue(json, cls); + } + ======= + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + >>>>>>> TEMP_RIGHT_BRANCH + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } +3:86,142c + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_adjacent/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_adjacent/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..f2d16f1cdc --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_adjacent/diff_HttpClientManager.java.txt @@ -0,0 +1,101 @@ +====1 +1:18a +2:19,22c +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:23a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====3 +1:25a +2:29a +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:50,56c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:62,71c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +3:67c + shutdown(); +==== +1:71a +2:85,92c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_adjacent/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_adjacent/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..4362955cdf --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_adjacent/diff_JacksonUtils.java.txt @@ -0,0 +1,175 @@ +====3 +1:18a +2:18a +3:19,23c + import 
com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====3 +1:19a +2:19a +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====3 +1:20a +2:20a +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:22c + import java.io.InputStream; +3:31,32c + import java.io.InputStream; + import java.io.IOException +====3 +1:32a +2:33a +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:36,54c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +====3 +1:53c +2:58c + } +3:86c + } +====3 +1:55,57c +2:60,62c + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +3:88,142c + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public 
static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_ignorespace/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_ignorespace/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..ad5b91edf0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_ignorespace/diff_HttpClientManager.java.txt @@ -0,0 +1,146 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; + ||||||| d4efcf4df + ======= + import com.alibaba.nacos.common.utils.ExceptionUtil; + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:28a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====1 +1:25a +2:35,36c +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:57,68c + + <<<<<<< HEAD + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + ||||||| d4efcf4df + ======= + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + >>>>>>> TEMP_RIGHT_BRANCH +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + 
ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:74,96c + <<<<<<< HEAD + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ||||||| d4efcf4df + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ======= + shutdown(); + >>>>>>> TEMP_RIGHT_BRANCH +3:67c + shutdown(); +==== +1:71a +2:110,135c + <<<<<<< HEAD + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + ||||||| d4efcf4df + ======= + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + >>>>>>> TEMP_RIGHT_BRANCH + } + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_ignorespace/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_ignorespace/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..4cbcfdb78c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_ignorespace/diff_JacksonUtils.java.txt @@ -0,0 +1,256 @@ +====1 +1:18a +2:19,23c +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====1 +1:19a +2:25c +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====1 +1:20a +2:27,29c +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:31,36c + <<<<<<< HEAD + import java.io.InputStream; + ||||||| d4efcf4df + ======= + import java.io.IOException; + >>>>>>> TEMP_RIGHT_BRANCH +3:31,32c + import java.io.InputStream; + import java.io.IOException +====1 +1:32a +2:48c +3:44c + 
mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:51,86c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + <<<<<<< HEAD + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +==== +1:52,57c + return mapper.readValue(json, cls); + } + + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +2:89,147c + ||||||| d4efcf4df + public static T toObj(String json, Class cls) throws Exception { + ======= + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + >>>>>>> TEMP_RIGHT_BRANCH + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return 
mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } +3:85,142c + return mapper.readValue(json, cls); + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..f2d16f1cdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports/diff_HttpClientManager.java.txt @@ -0,0 +1,101 @@ +====1 +1:18a +2:19,22c +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:23a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====3 +1:25a +2:29a +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:50,56c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new 
ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:62,71c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +3:67c + shutdown(); +==== +1:71a +2:85,92c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..4362955cdf --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports/diff_JacksonUtils.java.txt @@ -0,0 +1,175 @@ +====3 +1:18a +2:18a +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====3 +1:19a +2:19a +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====3 +1:20a +2:20a +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:22c + import java.io.InputStream; +3:31,32c + import java.io.InputStream; + import java.io.IOException +====3 +1:32a +2:33a +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws 
Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:36,54c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +====3 +1:53c +2:58c + } +3:86c + } +====3 +1:55,57c +2:60,62c + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +3:88,142c + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { 
+ return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports_ignorespace/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports_ignorespace/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..f2d16f1cdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports_ignorespace/diff_HttpClientManager.java.txt @@ -0,0 +1,101 @@ +====1 +1:18a +2:19,22c +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:23a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====3 +1:25a +2:29a +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:50,56c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:62,71c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +3:67c + shutdown(); +==== +1:71a +2:85,92c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports_ignorespace/diff_JacksonUtils.java.txt 
b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports_ignorespace/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..4362955cdf --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_ort_imports_ignorespace/diff_JacksonUtils.java.txt @@ -0,0 +1,175 @@ +====3 +1:18a +2:18a +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====3 +1:19a +2:19a +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====3 +1:20a +2:20a +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:22c + import java.io.InputStream; +3:31,32c + import java.io.InputStream; + import java.io.IOException +====3 +1:32a +2:33a +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:36,54c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +====3 +1:53c +2:58c + } +3:86c + } +====3 +1:55,57c +2:60,62c + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +3:88,142c + public static T toObj(byte[] json, TypeReference typeReference) { + try { + 
return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_histogram/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_histogram/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..98a510768a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_histogram/diff_HttpClientManager.java.txt @@ -0,0 +1,147 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; + ||||||| d4efcf4df + ======= + import com.alibaba.nacos.common.utils.ExceptionUtil; + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:28a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====1 +1:25a +2:35,36c +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:57,68c + + <<<<<<< HEAD + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + ||||||| d4efcf4df + ======= + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + >>>>>>> TEMP_RIGHT_BRANCH +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new 
NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:74,96c + <<<<<<< HEAD + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ||||||| d4efcf4df + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ======= + shutdown(); + >>>>>>> TEMP_RIGHT_BRANCH +3:67c + shutdown(); +==== +1:71a +2:110,136c + <<<<<<< HEAD + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + ||||||| d4efcf4df + ======= + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + >>>>>>> TEMP_RIGHT_BRANCH + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_histogram/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_histogram/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..0e754a4ff8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_histogram/diff_JacksonUtils.java.txt @@ -0,0 +1,257 @@ +====1 +1:18a +2:19,23c +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====1 +1:19a +2:25c +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====1 +1:20a +2:27,29c +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import 
com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:31,36c + <<<<<<< HEAD + import java.io.InputStream; + ||||||| d4efcf4df + ======= + import java.io.IOException; + >>>>>>> TEMP_RIGHT_BRANCH +3:31,32c + import java.io.InputStream; + import java.io.IOException +====1 +1:32a +2:48c +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:51,86c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + <<<<<<< HEAD + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +==== +1:53,57c + } + + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +2:90,151c + } + ||||||| d4efcf4df + public static T toObj(String json, Class cls) throws Exception { + return mapper.readValue(json, cls); + } + ======= + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + >>>>>>> TEMP_RIGHT_BRANCH + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) 
{ + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } +3:86,142c + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_ignorespace/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_ignorespace/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..ad5b91edf0 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_ignorespace/diff_HttpClientManager.java.txt @@ -0,0 +1,146 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; + ||||||| d4efcf4df + ======= + import com.alibaba.nacos.common.utils.ExceptionUtil; + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import 
com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:28a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====1 +1:25a +2:35,36c +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:57,68c + + <<<<<<< HEAD + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + ||||||| d4efcf4df + ======= + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + >>>>>>> TEMP_RIGHT_BRANCH +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:74,96c + <<<<<<< HEAD + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ||||||| d4efcf4df + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ======= + shutdown(); + >>>>>>> TEMP_RIGHT_BRANCH +3:67c + shutdown(); +==== +1:71a +2:110,135c + <<<<<<< HEAD + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + ||||||| d4efcf4df + ======= + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + >>>>>>> TEMP_RIGHT_BRANCH + } + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + 
NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_ignorespace/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_ignorespace/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..3acb06a339 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_ignorespace/diff_JacksonUtils.java.txt @@ -0,0 +1,258 @@ +====1 +1:18a +2:19,23c +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====1 +1:19a +2:25c +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====1 +1:20a +2:27,29c +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:31,36c + <<<<<<< HEAD + import java.io.InputStream; + ||||||| d4efcf4df + ======= + import java.io.IOException; + >>>>>>> TEMP_RIGHT_BRANCH +3:31,32c + import java.io.InputStream; + import java.io.IOException +====1 +1:32a +2:48c +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,57c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(String json, Class cls) throws Exception { + return mapper.readValue(json, cls); + } + + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +2:51,147c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + <<<<<<< HEAD + public static T toObj(InputStream inputStream, Class tClass) throws 
Exception { + return mapper.readValue(inputStream, tClass); + } + + public static T toObj(String json, Class cls) throws Exception { + ||||||| d4efcf4df + public static T toObj(String json, Class cls) throws Exception { + ======= + public static T toObj(String json, Class cls) { + try { + >>>>>>> TEMP_RIGHT_BRANCH + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } +3:47,142c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } + + public static T toObj(String json, Class cls) throws Exception { + return mapper.readValue(json, cls); + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new 
NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_minimal/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_minimal/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..98a510768a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_minimal/diff_HttpClientManager.java.txt @@ -0,0 +1,147 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; + ||||||| d4efcf4df + ======= + import com.alibaba.nacos.common.utils.ExceptionUtil; + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:28a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====1 +1:25a +2:35,36c +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:57,68c + + <<<<<<< HEAD + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + ||||||| d4efcf4df + ======= + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + >>>>>>> TEMP_RIGHT_BRANCH +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:74,96c + <<<<<<< HEAD + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ||||||| d4efcf4df + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + 
SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ======= + shutdown(); + >>>>>>> TEMP_RIGHT_BRANCH +3:67c + shutdown(); +==== +1:71a +2:110,136c + <<<<<<< HEAD + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + ||||||| d4efcf4df + ======= + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + >>>>>>> TEMP_RIGHT_BRANCH + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_minimal/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_minimal/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..0e754a4ff8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_minimal/diff_JacksonUtils.java.txt @@ -0,0 +1,257 @@ +====1 +1:18a +2:19,23c +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====1 +1:19a +2:25c +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====1 +1:20a +2:27,29c +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:31,36c + <<<<<<< HEAD + import java.io.InputStream; + ||||||| d4efcf4df + ======= + import java.io.IOException; + >>>>>>> TEMP_RIGHT_BRANCH +3:31,32c + import java.io.InputStream; + import java.io.IOException +====1 +1:32a +2:48c +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return 
toObj(StringUtils.newString4UTF8(json), cls); + } +2:51,86c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + <<<<<<< HEAD + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +==== +1:53,57c + } + + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +2:90,151c + } + ||||||| d4efcf4df + public static T toObj(String json, Class cls) throws Exception { + return mapper.readValue(json, cls); + } + ======= + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + >>>>>>> TEMP_RIGHT_BRANCH + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new 
ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } +3:86,142c + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_myers/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_myers/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..98a510768a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_myers/diff_HttpClientManager.java.txt @@ -0,0 +1,147 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; + ||||||| d4efcf4df + ======= + import com.alibaba.nacos.common.utils.ExceptionUtil; + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:28a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====1 +1:25a +2:35,36c +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:57,68c + + <<<<<<< HEAD + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + ||||||| d4efcf4df + ======= + private static final AtomicBoolean alreadyShutdown = new 
AtomicBoolean(false); + >>>>>>> TEMP_RIGHT_BRANCH +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:74,96c + <<<<<<< HEAD + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ||||||| d4efcf4df + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ======= + shutdown(); + >>>>>>> TEMP_RIGHT_BRANCH +3:67c + shutdown(); +==== +1:71a +2:110,136c + <<<<<<< HEAD + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + ||||||| d4efcf4df + ======= + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + >>>>>>> TEMP_RIGHT_BRANCH + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_myers/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_myers/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..0e754a4ff8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_myers/diff_JacksonUtils.java.txt @@ -0,0 +1,257 @@ +====1 +1:18a +2:19,23c +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import 
com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====1 +1:19a +2:25c +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====1 +1:20a +2:27,29c +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:31,36c + <<<<<<< HEAD + import java.io.InputStream; + ||||||| d4efcf4df + ======= + import java.io.IOException; + >>>>>>> TEMP_RIGHT_BRANCH +3:31,32c + import java.io.InputStream; + import java.io.IOException +====1 +1:32a +2:48c +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:51,86c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + <<<<<<< HEAD + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +==== +1:53,57c + } + + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +2:90,151c + } + ||||||| d4efcf4df + public static T toObj(String json, Class cls) throws Exception { + return mapper.readValue(json, cls); + } + ======= + public static T 
toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + >>>>>>> TEMP_RIGHT_BRANCH + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } +3:86,142c + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_patience/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_patience/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..98a510768a --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_patience/diff_HttpClientManager.java.txt @@ -0,0 +1,147 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import 
com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; + ||||||| d4efcf4df + ======= + import com.alibaba.nacos.common.utils.ExceptionUtil; + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:28a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====1 +1:25a +2:35,36c +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:57,68c + + <<<<<<< HEAD + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + ||||||| d4efcf4df + ======= + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + >>>>>>> TEMP_RIGHT_BRANCH +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:74,96c + <<<<<<< HEAD + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ||||||| d4efcf4df + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ======= + shutdown(); + >>>>>>> TEMP_RIGHT_BRANCH +3:67c + shutdown(); +==== +1:71a +2:110,136c + <<<<<<< HEAD + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + ||||||| d4efcf4df + ======= + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + >>>>>>> TEMP_RIGHT_BRANCH + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static 
NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_patience/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_patience/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..0e754a4ff8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/gitmerge_recursive_patience/diff_JacksonUtils.java.txt @@ -0,0 +1,257 @@ +====1 +1:18a +2:19,23c +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====1 +1:19a +2:25c +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====1 +1:20a +2:27,29c +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:31,36c + <<<<<<< HEAD + import java.io.InputStream; + ||||||| d4efcf4df + ======= + import java.io.IOException; + >>>>>>> TEMP_RIGHT_BRANCH +3:31,32c + import java.io.InputStream; + import java.io.IOException +====1 +1:32a +2:48c +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:51,86c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + <<<<<<< HEAD + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return 
mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +==== +1:53,57c + } + + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +2:90,151c + } + ||||||| d4efcf4df + public static T toObj(String json, Class cls) throws Exception { + return mapper.readValue(json, cls); + } + ======= + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + >>>>>>> TEMP_RIGHT_BRANCH + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } +3:86,142c + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public 
static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/intellimerge/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/intellimerge/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..7a840c373c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/intellimerge/diff_HttpClientManager.java.txt @@ -0,0 +1,152 @@ +==== +1:18a +2:19,27c + <<<<<<< HEAD + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; + ||||||| d4efcf4df + ======= + import com.alibaba.nacos.common.utils.ExceptionUtil; + >>>>>>> TEMP_RIGHT_BRANCH +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:28a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====1 +1:25a +2:35,36c +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:46c + +2:57,70c + + <<<<<<< HEAD + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + + ||||||| d4efcf4df + ======= + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + + >>>>>>> TEMP_RIGHT_BRANCH +3:53,62c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:75,97c + <<<<<<< HEAD + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ||||||| d4efcf4df + 
logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); + ======= + shutdown(); + >>>>>>> TEMP_RIGHT_BRANCH +3:67c + shutdown(); +==== +1:71a +2:111,138c + <<<<<<< HEAD + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + ||||||| d4efcf4df + ======= + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + >>>>>>> TEMP_RIGHT_BRANCH +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/intellimerge/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/intellimerge/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..0e754a4ff8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/intellimerge/diff_JacksonUtils.java.txt @@ -0,0 +1,257 @@ +====1 +1:18a +2:19,23c +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====1 +1:19a +2:25c +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====1 +1:20a +2:27,29c +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:31,36c + <<<<<<< HEAD + import java.io.InputStream; + ||||||| d4efcf4df + ======= + import java.io.IOException; + >>>>>>> TEMP_RIGHT_BRANCH +3:31,32c + import java.io.InputStream; + import java.io.IOException +====1 +1:32a +2:48c +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + 
return toObj(StringUtils.newString4UTF8(json), cls); + } +2:51,86c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + <<<<<<< HEAD + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +==== +1:53,57c + } + + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +2:90,151c + } + ||||||| d4efcf4df + public static T toObj(String json, Class cls) throws Exception { + return mapper.readValue(json, cls); + } + ======= + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + >>>>>>> TEMP_RIGHT_BRANCH + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new 
ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } +3:86,142c + } + + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/65/spork/diff_HttpClientManager.java.txt b/src/python/merge_conflict_analysis_diffs/65/spork/diff_HttpClientManager.java.txt new file mode 100644 index 0000000000..f2d16f1cdc --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/spork/diff_HttpClientManager.java.txt @@ -0,0 +1,101 @@ +====1 +1:18a +2:19,22c +3:19,22c + import com.alibaba.nacos.common.http.client.ApacheAsyncHttpClientRequest; + import com.alibaba.nacos.common.http.client.ApacheHttpClientRequest; + import com.alibaba.nacos.common.http.client.NacosAsyncRestTemplate; + import com.alibaba.nacos.common.http.client.NacosRestTemplate; +====3 +1:19a +2:23a +3:24c + import com.alibaba.nacos.common.utils.ExceptionUtil; +====3 +1:25a +2:29a +3:31,32c + import java.util.concurrent.atomic.AtomicBoolean; + +==== +1:45a +2:50,56c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + +3:53,61c + + private static final NacosRestTemplate NACOS_REST_TEMPLATE = new NacosRestTemplate( + new ApacheHttpClientRequest(HttpClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final NacosAsyncRestTemplate NACOS_ASYNC_REST_TEMPLATE = new NacosAsyncRestTemplate( + new ApacheAsyncHttpClientRequest(HttpAsyncClients.custom().setDefaultRequestConfig(DEFAULT_CONFIG).build())); + + private static final AtomicBoolean alreadyShutdown = new AtomicBoolean(false); + +==== +1:51,58c + logger.warn("[HttpClientManager] Start destroying HttpClient"); 
+ try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +2:62,71c + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ignore) { + } + logger.warn("[HttpClientManager] Destruction of the end"); +3:67c + shutdown(); +==== +1:71a +2:85,92c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + +3:81,107c + public static NacosRestTemplate getNacosRestTemplate() { + return NACOS_REST_TEMPLATE; + } + + public static NacosAsyncRestTemplate getNacosAsyncRestTemplate() { + return NACOS_ASYNC_REST_TEMPLATE; + } + + public static void shutdown() { + if (!alreadyShutdown.compareAndSet(false, true)) { + return; + } + logger.warn("[HttpClientManager] Start destroying HttpClient"); + try { + SYNC_HTTP_CLIENT.close(); + ASYNC_HTTP_CLIENT.close(); + NACOS_REST_TEMPLATE.close(); + NACOS_ASYNC_REST_TEMPLATE.close(); + } + catch (Exception ex) { + logger.error("An exception occurred when the HTTP client was closed : {}", + ExceptionUtil.getStackTrace(ex)); + } + logger.warn("[HttpClientManager] Destruction of the end"); + } + + diff --git a/src/python/merge_conflict_analysis_diffs/65/spork/diff_JacksonUtils.java.txt b/src/python/merge_conflict_analysis_diffs/65/spork/diff_JacksonUtils.java.txt new file mode 100644 index 0000000000..4362955cdf --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/65/spork/diff_JacksonUtils.java.txt @@ -0,0 +1,175 @@ +====3 +1:18a +2:18a +3:19,23c + import com.alibaba.nacos.api.exception.runtime.NacosDeserializationException; + import com.alibaba.nacos.api.exception.runtime.NacosSerializationException; + import com.fasterxml.jackson.annotation.JsonInclude.Include; + import com.fasterxml.jackson.core.JsonProcessingException; + import com.fasterxml.jackson.core.type.TypeReference; +====3 +1:19a +2:19a +3:25c + import com.fasterxml.jackson.databind.JsonNode; +====3 +1:20a +2:20a +3:27,29c + import com.fasterxml.jackson.databind.jsontype.NamedType; + import com.fasterxml.jackson.databind.node.ArrayNode; + import com.fasterxml.jackson.databind.node.ObjectNode; +==== +1:21a +2:22c + import java.io.InputStream; +3:31,32c + import java.io.InputStream; + import java.io.IOException +====3 +1:32a +2:33a +3:44c + mapper.setSerializationInclusion(Include.NON_NULL); +==== +1:35,49c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } +2:36,54c + public static String toJson(Object obj) throws Exception { + return mapper.writeValueAsString(obj); + } + + public static byte[] toJsonBytes(Object obj) throws Exception { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } + + public static T toObj(byte[] json, Class cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T 
toObj(byte[] json, Type cls) throws Exception { + return toObj(StringUtils.newString4UTF8(json), cls); + } + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +3:47,82c + public static String toJson(Object obj) { + try { + return mapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static byte[] toJsonBytes(Object obj) { + try { + return ByteUtils.toBytes(mapper.writeValueAsString(obj)); + } catch (JsonProcessingException e) { + throw new NacosSerializationException(obj.getClass(), e); + } + } + + public static T toObj(byte[] json, Class cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(byte[] json, Type cls) { + try { + return toObj(StringUtils.newString4UTF8(json), cls); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + + public static T toObj(InputStream inputStream, Class tClass) throws Exception { + return mapper.readValue(inputStream, tClass); + } +====3 +1:53c +2:58c + } +3:86c + } +====3 +1:55,57c +2:60,62c + public static T toObj(String json, Type type) throws Exception { + return mapper.readValue(json, mapper.constructType(type)); + } +3:88,142c + public static T toObj(byte[] json, TypeReference typeReference) { + try { + return toObj(StringUtils.newString4UTF8(json), typeReference); + } catch (Exception e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, Class cls) { + try { + return mapper.readValue(json, cls); + } catch (IOException e) { + throw new NacosDeserializationException(cls, e); + } + } + + public static T toObj(String json, Type type) { + try { + return mapper.readValue(json, mapper.constructType(type)); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static T toObj(String json, TypeReference typeReference) { + try { + return mapper.readValue(json, typeReference); + } catch (IOException e) { + throw new NacosDeserializationException(typeReference.getClass(), e); + } + } + + public static JsonNode toObj(String json) { + try { + return mapper.readTree(json); + } catch (IOException e) { + throw new NacosDeserializationException(e); + } + } + + public static void registerSubtype(Class clz, String type) { + mapper.registerSubtypes(new NamedType(clz, type)); + } + + public static ObjectNode createEmptyJsonNode() { + return new ObjectNode(mapper.getNodeFactory()); + } + + public static ArrayNode createEmptyArrayNode() { + return new ArrayNode(mapper.getNodeFactory()); + } + + public static JsonNode transferToJsonNode(Object obj) { + return mapper.valueToTree(obj); + } diff --git a/src/python/merge_conflict_analysis_diffs/654/git_hires_merge/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/git_hires_merge/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..a91b632048 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/git_hires_merge/diff_RoleServiceImpl.java.txt @@ -0,0 +1,154 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return 
roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +====1 +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129c +3:129c + cleanCache(resources, users); +====1 +1:131a +2:133c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:170c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:187c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:196a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:199c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:208,245c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + 
resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..d58b3eedc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort/diff_RoleServiceImpl.java.txt @@ -0,0 +1,166 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +==== +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129,140c + <<<<<<< HEAD + cleanCache(resources, users); + ||||||| 0cd4ff90 + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); + ======= + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + redisUtils.del("role::id:" + resources.getId()); + >>>>>>> TEMP_RIGHT_BRANCH +3:129c + cleanCache(resources, users); +====1 +1:131a +2:144c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:181c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:198c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:207a +3:196a +====1 +1:209c + 
if(userRepository.countByRoles(ids) > 0){ +2:210c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:219,256c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_adjacent/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_adjacent/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..a91b632048 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_adjacent/diff_RoleServiceImpl.java.txt @@ -0,0 +1,154 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +====1 +1:126,128c + // 清理缓存 + 
redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129c +3:129c + cleanCache(resources, users); +====1 +1:131a +2:133c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:170c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:187c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:196a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:199c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:208,245c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_ignorespace/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_ignorespace/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..d58b3eedc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_ignorespace/diff_RoleServiceImpl.java.txt @@ -0,0 +1,166 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if 
(roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +==== +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129,140c + <<<<<<< HEAD + cleanCache(resources, users); + ||||||| 0cd4ff90 + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); + ======= + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + redisUtils.del("role::id:" + resources.getId()); + >>>>>>> TEMP_RIGHT_BRANCH +3:129c + cleanCache(resources, users); +====1 +1:131a +2:144c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:181c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:198c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:207a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:210c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:219,256c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_imports/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_imports/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..a91b632048 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_imports/diff_RoleServiceImpl.java.txt @@ -0,0 +1,154 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import 
me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +====1 +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129c +3:129c + cleanCache(resources, users); +====1 +1:131a +2:133c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:170c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:187c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:196a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:199c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:208,245c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + 
userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_imports_ignorespace/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_imports_ignorespace/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..a91b632048 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_ort_imports_ignorespace/diff_RoleServiceImpl.java.txt @@ -0,0 +1,154 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +====1 +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129c +3:129c + cleanCache(resources, users); +====1 +1:131a +2:133c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:170c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:187c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:196a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:199c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 
+1:217a +2:208,245c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_histogram/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_histogram/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..d58b3eedc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_histogram/diff_RoleServiceImpl.java.txt @@ -0,0 +1,166 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +==== +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129,140c + <<<<<<< HEAD + 
cleanCache(resources, users); + ||||||| 0cd4ff90 + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); + ======= + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + redisUtils.del("role::id:" + resources.getId()); + >>>>>>> TEMP_RIGHT_BRANCH +3:129c + cleanCache(resources, users); +====1 +1:131a +2:144c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:181c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:198c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:207a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:210c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:219,256c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_ignorespace/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_ignorespace/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..d58b3eedc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_ignorespace/diff_RoleServiceImpl.java.txt @@ -0,0 +1,166 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + 
ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +==== +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129,140c + <<<<<<< HEAD + cleanCache(resources, users); + ||||||| 0cd4ff90 + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); + ======= + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + redisUtils.del("role::id:" + resources.getId()); + >>>>>>> TEMP_RIGHT_BRANCH +3:129c + cleanCache(resources, users); +====1 +1:131a +2:144c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:181c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:198c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:207a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:210c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:219,256c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_minimal/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_minimal/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..d58b3eedc8 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_minimal/diff_RoleServiceImpl.java.txt @@ -0,0 +1,166 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +==== +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129,140c + <<<<<<< HEAD + cleanCache(resources, users); + ||||||| 0cd4ff90 + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); + ======= + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + redisUtils.del("role::id:" + resources.getId()); + >>>>>>> TEMP_RIGHT_BRANCH +3:129c + cleanCache(resources, users); +====1 +1:131a +2:144c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:181c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:198c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:207a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:210c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:219,256c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if 
(CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_myers/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_myers/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..d58b3eedc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_myers/diff_RoleServiceImpl.java.txt @@ -0,0 +1,166 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +==== +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129,140c + <<<<<<< HEAD + cleanCache(resources, users); + ||||||| 0cd4ff90 + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); + ======= + // 清理缓存 + 
redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + redisUtils.del("role::id:" + resources.getId()); + >>>>>>> TEMP_RIGHT_BRANCH +3:129c + cleanCache(resources, users); +====1 +1:131a +2:144c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:181c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:198c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:207a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:210c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:219,256c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_patience/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_patience/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..d58b3eedc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/gitmerge_recursive_patience/diff_RoleServiceImpl.java.txt @@ -0,0 +1,166 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new 
EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +==== +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129,140c + <<<<<<< HEAD + cleanCache(resources, users); + ||||||| 0cd4ff90 + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); + ======= + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + redisUtils.del("role::id:" + resources.getId()); + >>>>>>> TEMP_RIGHT_BRANCH +3:129c + cleanCache(resources, users); +====1 +1:131a +2:144c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:181c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:198c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:207a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:210c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:219,256c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/intellimerge/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/intellimerge/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..d58b3eedc8 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/intellimerge/diff_RoleServiceImpl.java.txt @@ -0,0 +1,166 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 
+1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +==== +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129,140c + <<<<<<< HEAD + cleanCache(resources, users); + ||||||| 0cd4ff90 + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); + ======= + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + redisUtils.del("role::id:" + resources.getId()); + >>>>>>> TEMP_RIGHT_BRANCH +3:129c + cleanCache(resources, users); +====1 +1:131a +2:144c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:181c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:198c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:207a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:210c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:219,256c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + 
redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/654/spork/diff_RoleServiceImpl.java.txt b/src/python/merge_conflict_analysis_diffs/654/spork/diff_RoleServiceImpl.java.txt new file mode 100644 index 0000000000..a91b632048 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/654/spork/diff_RoleServiceImpl.java.txt @@ -0,0 +1,154 @@ +====1 +1:17a +2:18c +3:18c + import cn.hutool.core.collection.CollectionUtil; +====1 +1:19a +2:21c +3:21c + import me.zhengjie.modules.security.service.UserCacheClean; +====1 +1:42a +2:45c +3:45c + +====1 +1:61a +2:65c +3:65c + private final UserCacheClean userCacheClean; +====1 +1:71c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder))); +2:75c +3:75c + return roleMapper.toDto(roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder))); +====1 +1:76c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root,criteria,criteriaBuilder),pageable); +2:80c +3:80c + Page page = roleRepository.findAll((root, criteriaQuery, criteriaBuilder) -> QueryHelp.getPredicate(root, criteria, criteriaBuilder), pageable); +====1 +1:85c + ValidationUtil.isNull(role.getId(),"Role","id",id); +2:89c +3:89c + ValidationUtil.isNull(role.getId(), "Role", "id", id); +====1 +1:92,93c + if(roleRepository.findByName(resources.getName()) != null){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:96,97c +3:96,97c + if (roleRepository.findByName(resources.getName()) != null) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:102c + ValidationUtil.isNull(role.getId(),"Role","id",resources.getId()); +2:106c +3:106c + ValidationUtil.isNull(role.getId(), "Role", "id", resources.getId()); +====1 +1:106,107c + if(role1 != null && !role1.getId().equals(role.getId())){ + throw new EntityExistException(Role.class,"username",resources.getName()); +2:110,111c +3:110,111c + if (role1 != null && !role1.getId().equals(role.getId())) { + throw new EntityExistException(Role.class, "username", resources.getName()); +====1 +1:123c + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); +2:126a +3:126a +====1 +1:126,128c + // 清理缓存 + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.del("role::id:" + resources.getId()); +2:129c +3:129c + cleanCache(resources, users); +====1 +1:131a +2:133c +3:133c + +====1 +1:168c + if(user.getIsAdmin()){ +2:170c +3:170c + if (user.getIsAdmin()) { +====1 +1:185c + Map map = new LinkedHashMap<>(); +2:187c +3:187c + Map map = new LinkedHashMap<>(); +====1 +1:195,206c + /** + * 清理缓存 + * @param id / + */ + public void delCaches(Long id){ + List users = userRepository.findByRoleId(id); + Set userIds = 
users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys("data::user:",userIds); + redisUtils.delByKeys("menu::user:",userIds); + redisUtils.delByKeys("role::auth:",userIds); + } + +2:196a +3:196a +====1 +1:209c + if(userRepository.countByRoles(ids) > 0){ +2:199c +3:199c + if (userRepository.countByRoles(ids) > 0) { +====1 +1:217a +2:208,245c +3:208,245c + + /** + * 清理缓存 + * + * @param id / + */ + public void delCaches(Long id) { + List users = userRepository.findByRoleId(id); + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.DATE_USER, userIds); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.delByKeys(CacheKey.ROLE_AUTH, userIds); + } + + } + + /** + * 清理缓存 + * + * @param resources + * @param users + */ + private void cleanCache(Role resources, List users) { + // 清理缓存 + if (CollectionUtil.isNotEmpty(users)) { + users.stream().forEach(item -> { + userCacheClean.cleanUserCache(item.getUsername()); + }); + Set userIds = users.stream().map(User::getId).collect(Collectors.toSet()); + redisUtils.delByKeys(CacheKey.MENU_USER, userIds); + redisUtils.del(CacheKey.ROLE_ID + resources.getId()); + } + } + diff --git a/src/python/merge_conflict_analysis_diffs/693/git_hires_merge/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/git_hires_merge/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..7db4978f42 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/git_hires_merge/diff_DefaultGenerics.java.txt @@ -0,0 +1,94 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. 
+ if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,41c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:91a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..cf4318f6c3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort/diff_DefaultGenerics.java.txt @@ -0,0 +1,103 @@ +====1 +1:1c + /* Copyright (c) 2008-2022, Nathan Sweet +2:1c +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. 
+ if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,50c + <<<<<<< HEAD + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ||||||| b7a6a396 + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ======= + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; + >>>>>>> TEMP_RIGHT_BRANCH +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:100a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_adjacent/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_adjacent/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..7db4978f42 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_adjacent/diff_DefaultGenerics.java.txt @@ -0,0 +1,94 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. 
+ int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,41c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:91a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_ignorespace/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_ignorespace/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..cf4318f6c3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_ignorespace/diff_DefaultGenerics.java.txt @@ -0,0 +1,103 @@ +====1 +1:1c + /* Copyright (c) 2008-2022, Nathan Sweet +2:1c +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. 
+ int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,50c + <<<<<<< HEAD + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ||||||| b7a6a396 + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ======= + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; + >>>>>>> TEMP_RIGHT_BRANCH +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. 
+ if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:100a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_imports/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_imports/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..7db4978f42 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_imports/diff_DefaultGenerics.java.txt @@ -0,0 +1,94 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,41c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. 
+ if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:91a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_imports_ignorespace/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_imports_ignorespace/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..7db4978f42 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_ort_imports_ignorespace/diff_DefaultGenerics.java.txt @@ -0,0 +1,94 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,41c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. 
+ if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:91a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_histogram/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_histogram/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..cf4318f6c3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_histogram/diff_DefaultGenerics.java.txt @@ -0,0 +1,103 @@ +====1 +1:1c + /* Copyright (c) 2008-2022, Nathan Sweet +2:1c +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,50c + <<<<<<< HEAD + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ||||||| b7a6a396 + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ======= + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. 
+ if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; + >>>>>>> TEMP_RIGHT_BRANCH +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:100a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_ignorespace/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_ignorespace/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..cf4318f6c3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_ignorespace/diff_DefaultGenerics.java.txt @@ -0,0 +1,103 @@ +====1 +1:1c + /* Copyright (c) 2008-2022, Nathan Sweet +2:1c +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,50c + <<<<<<< HEAD + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ||||||| b7a6a396 + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. 
+ if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ======= + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; + >>>>>>> TEMP_RIGHT_BRANCH +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:100a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_minimal/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_minimal/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..cf4318f6c3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_minimal/diff_DefaultGenerics.java.txt @@ -0,0 +1,103 @@ +====1 +1:1c + /* Copyright (c) 2008-2022, Nathan Sweet +2:1c +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,50c + <<<<<<< HEAD + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. 
+ if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ||||||| b7a6a396 + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ======= + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; + >>>>>>> TEMP_RIGHT_BRANCH +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:100a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_myers/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_myers/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..cf4318f6c3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_myers/diff_DefaultGenerics.java.txt @@ -0,0 +1,103 @@ +====1 +1:1c + /* Copyright (c) 2008-2022, Nathan Sweet +2:1c +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. 
+ if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,50c + <<<<<<< HEAD + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ||||||| b7a6a396 + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ======= + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; + >>>>>>> TEMP_RIGHT_BRANCH +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:100a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_patience/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_patience/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..cf4318f6c3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/gitmerge_recursive_patience/diff_DefaultGenerics.java.txt @@ -0,0 +1,103 @@ +====1 +1:1c + /* Copyright (c) 2008-2022, Nathan Sweet +2:1c +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. 
+ if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,50c + <<<<<<< HEAD + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ||||||| b7a6a396 + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ======= + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; + >>>>>>> TEMP_RIGHT_BRANCH +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:100a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/intellimerge/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/intellimerge/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..cf4318f6c3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/intellimerge/diff_DefaultGenerics.java.txt @@ -0,0 +1,103 @@ +====1 +1:1c + /* Copyright (c) 2008-2022, Nathan Sweet +2:1c +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. 
+ int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. + if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,50c + <<<<<<< HEAD + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ||||||| b7a6a396 + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; + ======= + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; + >>>>>>> TEMP_RIGHT_BRANCH +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. 
+ if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:100a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/spork/diff_CollectionSerializerTest.java.txt b/src/python/merge_conflict_analysis_diffs/693/spork/diff_CollectionSerializerTest.java.txt new file mode 100644 index 0000000000..1a0883b5ff --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/spork/diff_CollectionSerializerTest.java.txt @@ -0,0 +1,41 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:34a +2:35c +3:35c + import java.util.Objects; +====1 +1:114a +2:116,126c +3:116,126c + @Test + void testGenerics() { + kryo.register(HasGenerics.class); + kryo.register(ArrayList.class); + + final HasGenerics test = new HasGenerics(); + test.list.add("moo"); + + roundTrip(6, test); + } + +====1 +1:122a +2:135,145c +3:135,145c + + public static class HasGenerics { + public List list = new ArrayList<>(); + + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + HasGenerics that = (HasGenerics) o; + return Objects.equals(list, that.list); + } + } diff --git a/src/python/merge_conflict_analysis_diffs/693/spork/diff_DefaultGenerics.java.txt b/src/python/merge_conflict_analysis_diffs/693/spork/diff_DefaultGenerics.java.txt new file mode 100644 index 0000000000..7db4978f42 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/spork/diff_DefaultGenerics.java.txt @@ -0,0 +1,94 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:29,34c + public final class DefaultGenerics implements Generics { + private final Kryo kryo; + + private int genericTypesSize; + private GenericType[] genericTypes = new GenericType[16]; + private int[] depths = new int[16]; +2:29c +3:29c + public final class DefaultGenerics extends BaseGenerics { +====1 +1:40,91c + this.kryo = kryo; + } + + @Override + public void pushGenericType (GenericType fieldType) { + // Ensure genericTypes and depths capacity. + int size = genericTypesSize; + if (size + 1 == genericTypes.length) { + GenericType[] genericTypesNew = new GenericType[genericTypes.length << 1]; + System.arraycopy(genericTypes, 0, genericTypesNew, 0, size); + genericTypes = genericTypesNew; + int[] depthsNew = new int[depths.length << 1]; + System.arraycopy(depths, 0, depthsNew, 0, size); + depths = depthsNew; + } + + genericTypesSize = size + 1; + genericTypes[size] = fieldType; + depths[size] = kryo.getDepth(); + } + + @Override + public void popGenericType () { + int size = genericTypesSize; + if (size == 0) return; + size--; + if (depths[size] < kryo.getDepth()) return; + genericTypes[size] = null; + genericTypesSize = size; + } + + @Override + public GenericType[] nextGenericTypes () { + int index = genericTypesSize; + if (index > 0) { + index--; + GenericType genericType = genericTypes[index]; + if (genericType.arguments == null) return null; + // The depth must match to prevent the types being wrong if a serializer doesn't call nextGenericTypes. 
+ if (depths[index] == kryo.getDepth() - 1) { + pushGenericType(genericType.arguments[genericType.arguments.length - 1]); + return genericType.arguments; + } + } + return null; + } + + @Override + public Class nextGenericClass () { + GenericType[] arguments = nextGenericTypes(); + if (arguments == null) return null; + return arguments[0].resolve(this); +2:35c +3:35c + super(kryo); +==== +1:96,97c + // Do not store type variables if hierarchy is empty or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +2:40,41c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length) return 0; +3:40,42c + // Do not store type variables if hierarchy is empty, or we do not have arguments for all root parameters, or we have more + // arguments than the hierarchy has parameters. + if (hierarchy.total == 0 || hierarchy.rootTotal > args.length || args.length > hierarchy.counts.length) return 0; +====1 +1:148,152c + @Override + public int getGenericTypesSize () { + return genericTypesSize; + } + +2:91a +3:92a diff --git a/src/python/merge_conflict_analysis_diffs/693/spork/diff_FieldSerializerBenchmark.java.txt b/src/python/merge_conflict_analysis_diffs/693/spork/diff_FieldSerializerBenchmark.java.txt new file mode 100644 index 0000000000..56e15b8e7d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/spork/diff_FieldSerializerBenchmark.java.txt @@ -0,0 +1,16 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:77a +2:78c +3:78c + @Param({"DEFAULT", "MINIMAL", "NONE"}) public Kryo.GenericsStrategy generics; +====1 +1:110a +2:112c +3:112c + kryo.setGenericsStrategy(generics); diff --git a/src/python/merge_conflict_analysis_diffs/693/spork/diff_Kryo.java.txt b/src/python/merge_conflict_analysis_diffs/693/spork/diff_Kryo.java.txt new file mode 100644 index 0000000000..1051879250 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/spork/diff_Kryo.java.txt @@ -0,0 +1,66 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====1 +1:94a +2:95c +3:95c + import com.esotericsoftware.kryo.util.MinimalGenerics; +====3 +1:561c +2:562c + if (Proxy.isProxyClass(type)) { +3:562c + if (isProxy(type)) { +====3 +1:1265a +2:1266a +3:1267,1276c + /** Returns true if the specified type is a proxy. When true, Kryo uses {@link InvocationHandler} instead of the specified type + * to find the class {@link Registration}. + *

    + * This can be overridden to support alternative proxy checks. The default implementation delegates to + * {@link Proxy#isProxyClass(Class)}. */ + public boolean isProxy (Class type) { + if (type == null) throw new IllegalArgumentException("type cannot be null."); + return Proxy.isProxyClass(type); + } + +====1 +1:1294c + generics = optimizedGenerics ? new DefaultGenerics(this) : NoGenerics.INSTANCE; +2:1295,1324c +3:1305,1334c + setGenericsStrategy(optimizedGenerics ? GenericsStrategy.DEFAULT : GenericsStrategy.NONE); + } + + /** Sets a {@link GenericsStrategy}. + * + * TODO JavaDoc + * + * @param strategy the strategy for processing generics information */ + public void setGenericsStrategy (GenericsStrategy strategy) { + this.generics = strategy.createInstance(this); + } + + public enum GenericsStrategy { + DEFAULT { + public Generics createInstance (Kryo kryo) { + return new DefaultGenerics(kryo); + } + }, + MINIMAL { + public Generics createInstance (Kryo kryo) { + return new MinimalGenerics(kryo); + } + }, + NONE { + public Generics createInstance (Kryo kryo) { + return NoGenerics.INSTANCE; + } + }; + + public abstract Generics createInstance (Kryo kryo); diff --git a/src/python/merge_conflict_analysis_diffs/693/spork/diff_MapSerializerTest.java.txt b/src/python/merge_conflict_analysis_diffs/693/spork/diff_MapSerializerTest.java.txt new file mode 100644 index 0000000000..e0e9fabd3b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/693/spork/diff_MapSerializerTest.java.txt @@ -0,0 +1,21 @@ +====3 +1:1c +2:1c + /* Copyright (c) 2008-2022, Nathan Sweet +3:1c + /* Copyright (c) 2008-2023, Nathan Sweet +====3 +1:43c +2:43c + import org.apache.commons.lang.builder.EqualsBuilder; +3:43c + import org.apache.commons.lang3.builder.EqualsBuilder; +====1 +1:120c + input = new Input(output.toBytes()); +2:120,123c +3:120,123c + final byte[] bytes = output.toBytes(); + assertEquals(bytes.length, 13); + + input = new Input(bytes); diff --git a/src/python/merge_conflict_analysis_diffs/70/git_hires_merge/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/git_hires_merge/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..5c2b1d2d08 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/git_hires_merge/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,32 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====3 +1:114c +2:115c + +3:114a +====1 +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:156c +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:162,166c + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..8da5c2a7d1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,48 @@ +====1 +1:21a +2:22c +3:22c + import 
com.alibaba.nacos.common.http.HttpClientManager; +====1 +1:114c + +2:114a +3:114a +==== +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:155,162c + <<<<<<< HEAD + HttpClientManager.shutdown(); + ||||||| b10745b87 + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:168,180c + + <<<<<<< HEAD + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); + ||||||| b10745b87 + ======= + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + >>>>>>> TEMP_RIGHT_BRANCH +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_adjacent/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_adjacent/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..5c2b1d2d08 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_adjacent/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,32 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====3 +1:114c +2:115c + +3:114a +====1 +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:156c +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:162,166c + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_ignorespace/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_ignorespace/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..8da5c2a7d1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_ignorespace/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,48 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====1 +1:114c + +2:114a +3:114a +==== +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:155,162c + <<<<<<< HEAD + HttpClientManager.shutdown(); + ||||||| b10745b87 + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:168,180c + + <<<<<<< HEAD + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); + ||||||| b10745b87 + ======= + context.close(); 
+ + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + >>>>>>> TEMP_RIGHT_BRANCH +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_imports/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_imports/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..5c2b1d2d08 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_imports/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,32 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====3 +1:114c +2:115c + +3:114a +====1 +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:156c +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:162,166c + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_imports_ignorespace/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_imports_ignorespace/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..5c2b1d2d08 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_ort_imports_ignorespace/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,32 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====3 +1:114c +2:115c + +3:114a +====1 +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:156c +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:162,166c + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_histogram/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_histogram/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..8da5c2a7d1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_histogram/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,48 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====1 +1:114c + +2:114a +3:114a +==== +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:155,162c + <<<<<<< HEAD + HttpClientManager.shutdown(); + ||||||| b10745b87 + LOGGER.error("Nacos failed to start, please see {} for more details.", + 
Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:168,180c + + <<<<<<< HEAD + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); + ||||||| b10745b87 + ======= + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + >>>>>>> TEMP_RIGHT_BRANCH +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_ignorespace/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_ignorespace/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..8da5c2a7d1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_ignorespace/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,48 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====1 +1:114c + +2:114a +3:114a +==== +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:155,162c + <<<<<<< HEAD + HttpClientManager.shutdown(); + ||||||| b10745b87 + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:168,180c + + <<<<<<< HEAD + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); + ||||||| b10745b87 + ======= + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + >>>>>>> TEMP_RIGHT_BRANCH +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_minimal/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_minimal/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..8da5c2a7d1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_minimal/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,48 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====1 +1:114c + +2:114a +3:114a +==== +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:155,162c + <<<<<<< HEAD + HttpClientManager.shutdown(); + ||||||| b10745b87 + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:168,180c + + <<<<<<< HEAD + LOGGER.error("Nacos failed to start, please see {} for more details.", + 
Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); + ||||||| b10745b87 + ======= + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + >>>>>>> TEMP_RIGHT_BRANCH +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_myers/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_myers/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..8da5c2a7d1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_myers/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,48 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====1 +1:114c + +2:114a +3:114a +==== +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:155,162c + <<<<<<< HEAD + HttpClientManager.shutdown(); + ||||||| b10745b87 + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:168,180c + + <<<<<<< HEAD + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); + ||||||| b10745b87 + ======= + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + >>>>>>> TEMP_RIGHT_BRANCH +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_patience/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_patience/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..8da5c2a7d1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/gitmerge_recursive_patience/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,48 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====1 +1:114c + +2:114a +3:114a +==== +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:155,162c + <<<<<<< HEAD + HttpClientManager.shutdown(); + ||||||| b10745b87 + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:168,180c + + <<<<<<< HEAD + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); + ||||||| b10745b87 + ======= + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + >>>>>>> 
TEMP_RIGHT_BRANCH +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/intellimerge/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/intellimerge/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..b0c91f20f5 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/intellimerge/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,49 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====1 +1:114c + +2:114a +3:114a +==== +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:155,162c + <<<<<<< HEAD + HttpClientManager.shutdown(); + ||||||| b10745b87 + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + ======= + >>>>>>> TEMP_RIGHT_BRANCH +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:168,181c + <<<<<<< HEAD + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); + ||||||| b10745b87 + ======= + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + >>>>>>> TEMP_RIGHT_BRANCH +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/70/spork/diff_StartingSpringApplicationRunListener.java.txt b/src/python/merge_conflict_analysis_diffs/70/spork/diff_StartingSpringApplicationRunListener.java.txt new file mode 100644 index 0000000000..5c2b1d2d08 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/70/spork/diff_StartingSpringApplicationRunListener.java.txt @@ -0,0 +1,32 @@ +====1 +1:21a +2:22c +3:22c + import com.alibaba.nacos.common.http.HttpClientManager; +====3 +1:114c +2:115c + +3:114a +====1 +1:155,157c + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + +2:156c +3:155c + HttpClientManager.shutdown(); +==== +1:162a +2:162,166c + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); + + context.close(); +3:161,165c + + context.close(); + + LOGGER.error("Nacos failed to start, please see {} for more details.", + Paths.get(ApplicationUtils.getNacosHome(), "logs/nacos.log")); diff --git a/src/python/merge_conflict_analysis_diffs/707/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..896c21cdb9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,18 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +====1 +1:19c + 2.15.1-SNAPSHOT +2:19c +3:19c + 2.16.0-SNAPSHOT +====1 +1:110c + 1.6.8 +2:110c +3:110c + 1.6.13 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort/diff_pom.xml.txt new file mode 
100644 index 0000000000..3212d88b5b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,37 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +==== +1:19c + 2.15.1-SNAPSHOT +2:19,25c + <<<<<<< HEAD + 2.16.0-SNAPSHOT + ||||||| 61efbfd + 2.15.1-SNAPSHOT + ======= + 2.15.3-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:19c + 2.16.0-SNAPSHOT +====2 +1:53c +3:53c + 2023-04-23T20:32:57Z +2:59c + 2023-05-30T20:33:25Z +====1 +1:110c + 1.6.8 +2:116c +3:110c + 1.6.13 +====2 +1:124c +3:124c + 0.2.0 +2:130c + 0.4.0 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..896c21cdb9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,18 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +====1 +1:19c + 2.15.1-SNAPSHOT +2:19c +3:19c + 2.16.0-SNAPSHOT +====1 +1:110c + 1.6.8 +2:110c +3:110c + 1.6.13 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..3212d88b5b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,37 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +==== +1:19c + 2.15.1-SNAPSHOT +2:19,25c + <<<<<<< HEAD + 2.16.0-SNAPSHOT + ||||||| 61efbfd + 2.15.1-SNAPSHOT + ======= + 2.15.3-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:19c + 2.16.0-SNAPSHOT +====2 +1:53c +3:53c + 2023-04-23T20:32:57Z +2:59c + 2023-05-30T20:33:25Z +====1 +1:110c + 1.6.8 +2:116c +3:110c + 1.6.13 +====2 +1:124c +3:124c + 0.2.0 +2:130c + 0.4.0 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..896c21cdb9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,18 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +====1 +1:19c + 2.15.1-SNAPSHOT +2:19c +3:19c + 2.16.0-SNAPSHOT +====1 +1:110c + 1.6.8 +2:110c +3:110c + 1.6.13 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..896c21cdb9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,18 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +====1 +1:19c + 2.15.1-SNAPSHOT +2:19c +3:19c + 2.16.0-SNAPSHOT +====1 +1:110c + 1.6.8 +2:110c +3:110c + 1.6.13 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..3212d88b5b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,37 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +==== +1:19c + 2.15.1-SNAPSHOT +2:19,25c + <<<<<<< HEAD + 2.16.0-SNAPSHOT + ||||||| 61efbfd + 2.15.1-SNAPSHOT + ======= + 2.15.3-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:19c + 2.16.0-SNAPSHOT +====2 +1:53c +3:53c + 
2023-04-23T20:32:57Z +2:59c + 2023-05-30T20:33:25Z +====1 +1:110c + 1.6.8 +2:116c +3:110c + 1.6.13 +====2 +1:124c +3:124c + 0.2.0 +2:130c + 0.4.0 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..3212d88b5b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,37 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +==== +1:19c + 2.15.1-SNAPSHOT +2:19,25c + <<<<<<< HEAD + 2.16.0-SNAPSHOT + ||||||| 61efbfd + 2.15.1-SNAPSHOT + ======= + 2.15.3-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:19c + 2.16.0-SNAPSHOT +====2 +1:53c +3:53c + 2023-04-23T20:32:57Z +2:59c + 2023-05-30T20:33:25Z +====1 +1:110c + 1.6.8 +2:116c +3:110c + 1.6.13 +====2 +1:124c +3:124c + 0.2.0 +2:130c + 0.4.0 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..3212d88b5b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,37 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +==== +1:19c + 2.15.1-SNAPSHOT +2:19,25c + <<<<<<< HEAD + 2.16.0-SNAPSHOT + ||||||| 61efbfd + 2.15.1-SNAPSHOT + ======= + 2.15.3-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:19c + 2.16.0-SNAPSHOT +====2 +1:53c +3:53c + 2023-04-23T20:32:57Z +2:59c + 2023-05-30T20:33:25Z +====1 +1:110c + 1.6.8 +2:116c +3:110c + 1.6.13 +====2 +1:124c +3:124c + 0.2.0 +2:130c + 0.4.0 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..3212d88b5b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,37 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +==== +1:19c + 2.15.1-SNAPSHOT +2:19,25c + <<<<<<< HEAD + 2.16.0-SNAPSHOT + ||||||| 61efbfd + 2.15.1-SNAPSHOT + ======= + 2.15.3-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:19c + 2.16.0-SNAPSHOT +====2 +1:53c +3:53c + 2023-04-23T20:32:57Z +2:59c + 2023-05-30T20:33:25Z +====1 +1:110c + 1.6.8 +2:116c +3:110c + 1.6.13 +====2 +1:124c +3:124c + 0.2.0 +2:130c + 0.4.0 diff --git a/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..3212d88b5b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,37 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +==== +1:19c + 2.15.1-SNAPSHOT +2:19,25c + <<<<<<< HEAD + 2.16.0-SNAPSHOT + ||||||| 61efbfd + 2.15.1-SNAPSHOT + ======= + 2.15.3-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:19c + 2.16.0-SNAPSHOT +====2 +1:53c +3:53c + 2023-04-23T20:32:57Z +2:59c + 2023-05-30T20:33:25Z +====1 +1:110c + 1.6.8 +2:116c +3:110c + 1.6.13 +====2 +1:124c +3:124c + 0.2.0 +2:130c + 0.4.0 diff --git a/src/python/merge_conflict_analysis_diffs/707/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..3212d88b5b --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/707/intellimerge/diff_pom.xml.txt @@ -0,0 +1,37 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +==== +1:19c + 2.15.1-SNAPSHOT +2:19,25c + <<<<<<< HEAD + 2.16.0-SNAPSHOT + ||||||| 61efbfd + 2.15.1-SNAPSHOT + ======= + 2.15.3-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:19c + 2.16.0-SNAPSHOT +====2 +1:53c +3:53c + 2023-04-23T20:32:57Z +2:59c + 2023-05-30T20:33:25Z +====1 +1:110c + 1.6.8 +2:116c +3:110c + 1.6.13 +====2 +1:124c +3:124c + 0.2.0 +2:130c + 0.4.0 diff --git a/src/python/merge_conflict_analysis_diffs/707/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/707/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..896c21cdb9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/707/spork/diff_pom.xml.txt @@ -0,0 +1,18 @@ +====1 +1:13c + 2.15 +2:13c +3:13c + 2.16-SNAPSHOT +====1 +1:19c + 2.15.1-SNAPSHOT +2:19c +3:19c + 2.16.0-SNAPSHOT +====1 +1:110c + 1.6.8 +2:110c +3:110c + 1.6.13 diff --git a/src/python/merge_conflict_analysis_diffs/809/git_hires_merge/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/git_hires_merge/diff_VERSION.txt new file mode 100644 index 0000000000..426ea8faae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/git_hires_merge/diff_VERSION.txt @@ -0,0 +1,25 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/git_hires_merge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/git_hires_merge/diff_pom.xml.txt new file mode 100644 index 0000000000..a380c45292 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/git_hires_merge/diff_pom.xml.txt @@ -0,0 +1,28 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +====1 +1:12c + 2.6.3-SNAPSHOT +2:12c +3:12c + 2.7.0-SNAPSHOT +====3 +1:14,15c +2:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:26,29c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort/diff_VERSION.txt new file mode 100644 index 0000000000..6573bfb0e9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort/diff_VERSION.txt @@ -0,0 +1,31 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,32c + <<<<<<< HEAD + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name 
com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) + ||||||| f554808f2 + 2.6.3 (not yet released) + ======= + 2.6.3 (12-Oct-2015) + >>>>>>> TEMP_RIGHT_BRANCH +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort/diff_pom.xml.txt new file mode 100644 index 0000000000..1774bac65f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort/diff_pom.xml.txt @@ -0,0 +1,35 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +==== +1:12c + 2.6.3-SNAPSHOT +2:12,18c + <<<<<<< HEAD + 2.7.0-SNAPSHOT + ||||||| f554808f2 + 2.6.3-SNAPSHOT + ======= + 2.6.4-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:12c + 2.7.0-SNAPSHOT +====1 +1:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +2:20,21c +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:32,35c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_adjacent/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_adjacent/diff_VERSION.txt new file mode 100644 index 0000000000..426ea8faae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_adjacent/diff_VERSION.txt @@ -0,0 +1,25 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_adjacent/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_adjacent/diff_pom.xml.txt new file mode 100644 index 0000000000..a380c45292 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_adjacent/diff_pom.xml.txt @@ -0,0 +1,28 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +====1 +1:12c + 2.6.3-SNAPSHOT +2:12c +3:12c + 2.7.0-SNAPSHOT +====3 +1:14,15c +2:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +3:14,15c + Core Jackson abstractions, basic JSON 
streaming API implementation + 2008 +====1 +1:25a +2:26,29c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_ignorespace/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_ignorespace/diff_VERSION.txt new file mode 100644 index 0000000000..6573bfb0e9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_ignorespace/diff_VERSION.txt @@ -0,0 +1,31 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,32c + <<<<<<< HEAD + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) + ||||||| f554808f2 + 2.6.3 (not yet released) + ======= + 2.6.3 (12-Oct-2015) + >>>>>>> TEMP_RIGHT_BRANCH +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..1774bac65f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_ignorespace/diff_pom.xml.txt @@ -0,0 +1,35 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +==== +1:12c + 2.6.3-SNAPSHOT +2:12,18c + <<<<<<< HEAD + 2.7.0-SNAPSHOT + ||||||| f554808f2 + 2.6.3-SNAPSHOT + ======= + 2.6.4-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:12c + 2.7.0-SNAPSHOT +====1 +1:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +2:20,21c +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:32,35c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports/diff_VERSION.txt new file mode 100644 index 0000000000..426ea8faae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports/diff_VERSION.txt @@ -0,0 +1,25 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented 
`ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports/diff_pom.xml.txt new file mode 100644 index 0000000000..a380c45292 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports/diff_pom.xml.txt @@ -0,0 +1,28 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +====1 +1:12c + 2.6.3-SNAPSHOT +2:12c +3:12c + 2.7.0-SNAPSHOT +====3 +1:14,15c +2:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:26,29c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports_ignorespace/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports_ignorespace/diff_VERSION.txt new file mode 100644 index 0000000000..426ea8faae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports_ignorespace/diff_VERSION.txt @@ -0,0 +1,25 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..a380c45292 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_ort_imports_ignorespace/diff_pom.xml.txt @@ -0,0 +1,28 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +====1 +1:12c + 2.6.3-SNAPSHOT +2:12c +3:12c + 2.7.0-SNAPSHOT +====3 +1:14,15c +2:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:26,29c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_histogram/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_histogram/diff_VERSION.txt new file mode 100644 index 0000000000..6573bfb0e9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_histogram/diff_VERSION.txt @@ -0,0 +1,31 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,32c + <<<<<<< HEAD + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of 
function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) + ||||||| f554808f2 + 2.6.3 (not yet released) + ======= + 2.6.3 (12-Oct-2015) + >>>>>>> TEMP_RIGHT_BRANCH +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_histogram/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_histogram/diff_pom.xml.txt new file mode 100644 index 0000000000..1774bac65f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_histogram/diff_pom.xml.txt @@ -0,0 +1,35 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +==== +1:12c + 2.6.3-SNAPSHOT +2:12,18c + <<<<<<< HEAD + 2.7.0-SNAPSHOT + ||||||| f554808f2 + 2.6.3-SNAPSHOT + ======= + 2.6.4-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:12c + 2.7.0-SNAPSHOT +====1 +1:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +2:20,21c +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:32,35c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_ignorespace/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_ignorespace/diff_VERSION.txt new file mode 100644 index 0000000000..6573bfb0e9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_ignorespace/diff_VERSION.txt @@ -0,0 +1,31 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,32c + <<<<<<< HEAD + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) + ||||||| f554808f2 + 2.6.3 (not yet released) + ======= + 2.6.3 (12-Oct-2015) + >>>>>>> TEMP_RIGHT_BRANCH +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_ignorespace/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_ignorespace/diff_pom.xml.txt new file mode 100644 index 0000000000..1774bac65f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_ignorespace/diff_pom.xml.txt @@ -0,0 +1,35 @@ +====1 +1:6c + 2.6.1 
+2:6c +3:6c + 2.6.2 +==== +1:12c + 2.6.3-SNAPSHOT +2:12,18c + <<<<<<< HEAD + 2.7.0-SNAPSHOT + ||||||| f554808f2 + 2.6.3-SNAPSHOT + ======= + 2.6.4-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:12c + 2.7.0-SNAPSHOT +====1 +1:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +2:20,21c +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:32,35c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_minimal/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_minimal/diff_VERSION.txt new file mode 100644 index 0000000000..6573bfb0e9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_minimal/diff_VERSION.txt @@ -0,0 +1,31 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,32c + <<<<<<< HEAD + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) + ||||||| f554808f2 + 2.6.3 (not yet released) + ======= + 2.6.3 (12-Oct-2015) + >>>>>>> TEMP_RIGHT_BRANCH +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_minimal/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_minimal/diff_pom.xml.txt new file mode 100644 index 0000000000..1774bac65f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_minimal/diff_pom.xml.txt @@ -0,0 +1,35 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +==== +1:12c + 2.6.3-SNAPSHOT +2:12,18c + <<<<<<< HEAD + 2.7.0-SNAPSHOT + ||||||| f554808f2 + 2.6.3-SNAPSHOT + ======= + 2.6.4-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:12c + 2.7.0-SNAPSHOT +====1 +1:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +2:20,21c +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:32,35c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_myers/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_myers/diff_VERSION.txt new file mode 100644 index 0000000000..6573bfb0e9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_myers/diff_VERSION.txt @@ -0,0 +1,31 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,32c + <<<<<<< HEAD + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved 
Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) + ||||||| f554808f2 + 2.6.3 (not yet released) + ======= + 2.6.3 (12-Oct-2015) + >>>>>>> TEMP_RIGHT_BRANCH +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_myers/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_myers/diff_pom.xml.txt new file mode 100644 index 0000000000..1774bac65f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_myers/diff_pom.xml.txt @@ -0,0 +1,35 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +==== +1:12c + 2.6.3-SNAPSHOT +2:12,18c + <<<<<<< HEAD + 2.7.0-SNAPSHOT + ||||||| f554808f2 + 2.6.3-SNAPSHOT + ======= + 2.6.4-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:12c + 2.7.0-SNAPSHOT +====1 +1:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +2:20,21c +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:32,35c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_patience/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_patience/diff_VERSION.txt new file mode 100644 index 0000000000..6573bfb0e9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_patience/diff_VERSION.txt @@ -0,0 +1,31 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,32c + <<<<<<< HEAD + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) + ||||||| f554808f2 + 2.6.3 (not yet released) + ======= + 2.6.3 (12-Oct-2015) + >>>>>>> TEMP_RIGHT_BRANCH +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_patience/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_patience/diff_pom.xml.txt new file mode 100644 index 0000000000..1774bac65f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/gitmerge_recursive_patience/diff_pom.xml.txt @@ -0,0 +1,35 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +==== +1:12c + 2.6.3-SNAPSHOT +2:12,18c + <<<<<<< HEAD + 2.7.0-SNAPSHOT + ||||||| f554808f2 + 2.6.3-SNAPSHOT + ======= + 2.6.4-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:12c + 2.7.0-SNAPSHOT +====1 
+1:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +2:20,21c +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:32,35c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/intellimerge/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/intellimerge/diff_VERSION.txt new file mode 100644 index 0000000000..6573bfb0e9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/intellimerge/diff_VERSION.txt @@ -0,0 +1,31 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,32c + <<<<<<< HEAD + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) + ||||||| f554808f2 + 2.6.3 (not yet released) + ======= + 2.6.3 (12-Oct-2015) + >>>>>>> TEMP_RIGHT_BRANCH +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/intellimerge/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/intellimerge/diff_pom.xml.txt new file mode 100644 index 0000000000..1774bac65f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/intellimerge/diff_pom.xml.txt @@ -0,0 +1,35 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +==== +1:12c + 2.6.3-SNAPSHOT +2:12,18c + <<<<<<< HEAD + 2.7.0-SNAPSHOT + ||||||| f554808f2 + 2.6.3-SNAPSHOT + ======= + 2.6.4-SNAPSHOT + >>>>>>> TEMP_RIGHT_BRANCH +3:12c + 2.7.0-SNAPSHOT +====1 +1:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +2:20,21c +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:32,35c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/809/spork/diff_VERSION.txt b/src/python/merge_conflict_analysis_diffs/809/spork/diff_VERSION.txt new file mode 100644 index 0000000000..426ea8faae --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/spork/diff_VERSION.txt @@ -0,0 +1,25 @@ +==== +1:17c + 2.6.3 (not yet released) +2:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + - Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (not yet released) +3:17,26c + 2.7.0 (not yet released) + + #198: Add back-references to `JsonParser` / `JsonGenerator` for low-level parsing issues + (via `JsonParseException`, `JsonGenerationException`) + #211: Typo of function name com.fasterxml.jackson.core.Version.isUknownVersion() + (reported by timray@github) + 
- Implemented `ReaderBasedJsonParser.nextFieldName(SerializableString)` + (to improved Afterburner performance over String/char[] sources) + + 2.6.3 (12-Oct-2015) diff --git a/src/python/merge_conflict_analysis_diffs/809/spork/diff_pom.xml.txt b/src/python/merge_conflict_analysis_diffs/809/spork/diff_pom.xml.txt new file mode 100644 index 0000000000..a380c45292 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/809/spork/diff_pom.xml.txt @@ -0,0 +1,28 @@ +====1 +1:6c + 2.6.1 +2:6c +3:6c + 2.6.2 +====1 +1:12c + 2.6.3-SNAPSHOT +2:12c +3:12c + 2.7.0-SNAPSHOT +====3 +1:14,15c +2:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + +3:14,15c + Core Jackson abstractions, basic JSON streaming API implementation + 2008 +====1 +1:25a +2:26,29c +3:26,29c + + 1.6 + 1.6 + diff --git a/src/python/merge_conflict_analysis_diffs/845/git_hires_merge/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/git_hires_merge/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..8d03f594b1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/git_hires_merge/diff_DoubleToDecimal.java.txt @@ -0,0 +1,585 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. +< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. +--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +40a35,37 +> import static java.lang.Double.doubleToRawLongBits; +> import static java.lang.Long.numberOfLeadingZeros; +> +42a40,41 +> * +> * @author Raffaello Giulietti +46,57c45,56 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. +--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +60c59,61 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +63c64 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +66,67c67,68 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +69,70c70,71 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +72c73 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +75c76 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +78c79 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +81c82 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +85c86 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +88,89c89,90 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final long C_MIN = 1L << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final long C_MIN = 1L << P - 1; +91c92 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. 
+94,95c95,96 +< /* Mask to extract the fraction bits */ +< private static final long T_MASK = (1L << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final long T_MASK = (1L << P - 1) - 1; +97c98 +< /* Used in rop() */ +--- +> // Used in rop(). +100c101 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +103,108c104,113 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. +> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(DoubleToDecimal::new); +111,115c116,120 +< * Room for the longer of the forms +< * -ddddd.dddddddddddd H + 2 characters +< * -0.00ddddddddddddddddd H + 5 characters +< * -d.ddddddddddddddddE-eee H + 7 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddddddddddd H + 2 characters +> -0.00ddddddddddddddddd H + 5 characters +> -d.ddddddddddddddddE-eee H + 7 characters +> where there are H digits d +117c122 +< public static final int MAX_CHARS = H + 7; +--- +> public final int MAX_CHARS = H + 7; +118a124 +> // Numerical results are created here... +121c127,130 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into bytes of rightmost valid character. +128,129c137,189 +< * Returns a string representation of the {@code double} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code double} argument. +> * +> *

    The characters of the result are all drawn from the ASCII set. +> *

      +> *
    • Any NaN, whether quiet or signaling, is rendered as +> * {@code "NaN"}, regardless of the sign bit. +> *
    • The infinities +∞ and -∞ are rendered as +> * {@code "Infinity"} and {@code "-Infinity"}, respectively. +> *
    • The positive and negative zeroes are rendered as +> * {@code "0.0"} and {@code "-0.0"}, respectively. +> *
    • A finite negative {@code v} is rendered as the sign +> * '{@code -}' followed by the rendering of the magnitude -{@code v}. +> *
    • A finite positive {@code v} is rendered in two stages: +> *
        +> *
      • Selection of a decimal: A well-defined +> * decimal dv is selected +> * to represent {@code v}. +> *
      • Formatting as a string: The decimal +> * dv is formatted as a string, +> * either in plain or in computerized scientific notation, +> * depending on its value. +> *
      +> *
    +> * +> *

    A decimal is a number of the form +> * d×10i +> * for some (unique) integers d > 0 and i such that +> * d is not a multiple of 10. +> * These integers are the significand and +> * the exponent, respectively, of the decimal. +> * The length of the decimal is the (unique) +> * integer n meeting +> * 10n-1d < 10n. +> * +> *

    The decimal dv +> * for a finite positive {@code v} is defined as follows: +> *

      +> *
    • Let R be the set of all decimals that round to {@code v} +> * according to the usual round-to-closest rule of +> * IEEE 754 floating-point arithmetic. +> *
    • Let m be the minimal length over all decimals in R. +> *
    • When m ≥ 2, let T be the set of all decimals +> * in R with length m. +> * Otherwise, let T be the set of all decimals +> * in R with length 1 or 2. +> *
    • Define dv as +> * the decimal in T that is closest to {@code v}. +> * Or if there are two such decimals in T, +> * select the one with the even significand (there is exactly one). +> *
    +> * +> *

    The (uniquely) selected decimal dv +> * is then formatted. +131,133c191,247 +< * @param v the {@code double} to be converted. +< * @return a string representation of the argument. +< * @see Double#toString(double) +--- +> *

    Let d, i and n be the significand, exponent and +> * length of dv, respectively. +> * Further, let e = n + i - 1 and let +> * d1dn +> * be the usual decimal expansion of the significand. +> * Note that d1 ≠ 0 ≠ dn. +> *

      +> *
    • Case -3 ≤ e < 0: +> * dv is formatted as +> * 0.00d1dn, +> * where there are exactly -(n + i) zeroes between +> * the decimal point and d1. +> * For example, 123 × 10-4 is formatted as +> * {@code 0.0123}. +> *
    • Case 0 ≤ e < 7: +> *
        +> *
      • Subcase i ≥ 0: +> * dv is formatted as +> * d1dn00.0, +> * where there are exactly i zeroes +> * between dn and the decimal point. +> * For example, 123 × 102 is formatted as +> * {@code 12300.0}. +> *
      • Subcase i < 0: +> * dv is formatted as +> * d1dn+i.dn+i+1dn. +> * There are exactly -i digits to the right of +> * the decimal point. +> * For example, 123 × 10-1 is formatted as +> * {@code 12.3}. +> *
      +> *
    • Case e < -3 or e ≥ 7: +> * computerized scientific notation is used to format +> * dv. +> * Here e is formatted as by {@link Integer#toString(int)}. +> *
        +> *
      • Subcase n = 1: +> * dv is formatted as +> * d1.0Ee. +> * For example, 1 × 1023 is formatted as +> * {@code 1.0E23}. +> *
      • Subcase n > 1: +> * dv is formatted as +> * d1.d2dnEe. +> * For example, 123 × 10-21 is formatted as +> * {@code 1.23E-19}. +> *
      +> *
    +> * +> * @param v the {@code double} to be rendered. +> * @return a string rendering of the argument. +136c250 +< return new DoubleToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +152c266,270 +< return new DoubleToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static DoubleToDecimal threadLocalInstance() { +> return threadLocal.get(); +170,171c288 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +175c292 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +178c295 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +180,181c297,298 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +193,198c310,315 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +202,208c319,325 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +219c336 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +222c339 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +232c349 +< /* subnormal value */ +--- +> // subnormal value +247,261c364,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing +284c401 +< /* g1 and g0 are as in section 9.8.3 of [1], so g = g1 2^63 + g0 */ +--- +> // g1 and g0 are as in section 9.9.3 of [1], so g = g1 2^63 + g0 +295,303c412,420 +< * For n = 17, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +< * = floor(s 115_292_150_460_684_698 2^4 / 2^64) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 17, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +> = floor(s 115_292_150_460_684_698 2^4 / 2^64) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +315,318c432,435 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +324c441 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +328,329c445,446 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +336,337c453,454 +< * Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +< * See section 9.9 and figure 8 of [1]. +--- +> Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +> See section 9.10 and figure 5 of [1]. +349c466 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +353,356c470,473 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +364,367c481,484 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +373,384c490,501 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. 
+< * Therefore, split the H = 17 digits of f into: +< * h = the most significant digit of f +< * m = the next 8 most significant digits of f +< * l = the last 8, least significant digits of f +< * +< * For n = 17, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +< * floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +< * and for n = 9, m = 8 +< * floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 17 digits of f into: +> h = the most significant digit of f +> m = the next 8 most significant digits of f +> l = the last 8, least significant digits of f +> +> For n = 17, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +> floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +> and for n = 9, m = 8 +> floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +402,404c519,521 +< * 0 < e <= 7: plain format without leading zeroes. +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +426c543 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +439c556 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +457,458c574,575 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +472c589 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +480,485c597,602 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +505,506c622,623 +< * For n = 3, m = 2 the table in section 10 of [1] shows +< * floor(e / 100) = floor(1_311 e / 2^17) +--- +> For n = 3, m = 2 the table in section 10 of [1] shows +> floor(e / 100) = floor(1_311 e / 2^17) +513,514c630,631 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +529c646 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. diff --git a/src/python/merge_conflict_analysis_diffs/845/git_hires_merge/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/git_hires_merge/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..846513122f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/git_hires_merge/diff_FloatToDecimal.java.txt @@ -0,0 +1,577 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. +< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
+--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +39a34,36 +> import static java.lang.Float.floatToRawIntBits; +> import static java.lang.Integer.numberOfLeadingZeros; +> +41a39,40 +> * +> * @author Raffaello Giulietti +45,56c44,55 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. 
+--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +59c58,60 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +62c63 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +65,66c66,67 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +68,69c69,70 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +71c72 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +74c75 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +77c78 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +80c81 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +84c85 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +87,88c88,89 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final int C_MIN = 1 << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final int C_MIN = 1 << P - 1; +90c91 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +93,94c94,95 +< /* Mask to extract the fraction bits */ +< private static final int T_MASK = (1 << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final int T_MASK = (1 << P - 1) - 1; +96c97 +< /* Used in rop() */ +--- +> // Used in rop(). +99c100 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +102,107c103,112 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. 
+> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(FloatToDecimal::new); +110,114c115,119 +< * Room for the longer of the forms +< * -ddddd.dddd H + 2 characters +< * -0.00ddddddddd H + 5 characters +< * -d.ddddddddE-ee H + 6 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddd H + 2 characters +> -0.00ddddddddd H + 5 characters +> -d.ddddddddE-ee H + 6 characters +> where there are H digits d +116c121 +< public static final int MAX_CHARS = H + 6; +--- +> public final int MAX_CHARS = H + 6; +117a123 +> // Numerical results are created here... +120c126,129 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into buf of rightmost valid character. +127,128c136,168 +< * Returns a string representation of the {@code float} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code float} argument. +> * +> *

    The characters of the result are all drawn from the ASCII set. +> *

      +> *
    • Any NaN, whether quiet or signaling, is rendered as +> * {@code "NaN"}, regardless of the sign bit. +> *
    • The infinities +∞ and -∞ are rendered as +> * {@code "Infinity"} and {@code "-Infinity"}, respectively. +> *
    • The positive and negative zeroes are rendered as +> * {@code "0.0"} and {@code "-0.0"}, respectively. +> *
    • A finite negative {@code v} is rendered as the sign +> * '{@code -}' followed by the rendering of the magnitude -{@code v}. +> *
    • A finite positive {@code v} is rendered in two stages: +> *
        +> *
      • Selection of a decimal: A well-defined +> * decimal dv is selected +> * to represent {@code v}. +> *
      • Formatting as a string: The decimal +> * dv is formatted as a string, +> * either in plain or in computerized scientific notation, +> * depending on its value. +> *
      +> *
    +> * +> *

    A decimal is a number of the form +> * d×10i +> * for some (unique) integers d > 0 and i such that +> * d is not a multiple of 10. +> * These integers are the significand and +> * the exponent, respectively, of the decimal. +> * The length of the decimal is the (unique) +> * integer n meeting +> * 10n-1d < 10n. +130,132c170,246 +< * @param v the {@code float} to be converted. +< * @return a string representation of the argument. +< * @see Float#toString(float) +--- +> *

    The decimal dv +> * for a finite positive {@code v} is defined as follows: +> *

      +> *
    • Let R be the set of all decimals that round to {@code v} +> * according to the usual round-to-closest rule of +> * IEEE 754 floating-point arithmetic. +> *
    • Let m be the minimal length over all decimals in R. +> *
    • When m ≥ 2, let T be the set of all decimals +> * in R with length m. +> * Otherwise, let T be the set of all decimals +> * in R with length 1 or 2. +> *
    • Define dv as +> * the decimal in T that is closest to {@code v}. +> * Or if there are two such decimals in T, +> * select the one with the even significand (there is exactly one). +> *
    +> * +> *

    The (uniquely) selected decimal dv +> * is then formatted. +> * +> *

    Let d, i and n be the significand, exponent and +> * length of dv, respectively. +> * Further, let e = n + i - 1 and let +> * d1dn +> * be the usual decimal expansion of the significand. +> * Note that d1 ≠ 0 ≠ dn. +> *

      +> *
    • Case -3 ≤ e < 0: +> * dv is formatted as +> * 0.00d1dn, +> * where there are exactly -(n + i) zeroes between +> * the decimal point and d1. +> * For example, 123 × 10-4 is formatted as +> * {@code 0.0123}. +> *
    • Case 0 ≤ e < 7: +> *
        +> *
      • Subcase i ≥ 0: +> * dv is formatted as +> * d1dn00.0, +> * where there are exactly i zeroes +> * between dn and the decimal point. +> * For example, 123 × 102 is formatted as +> * {@code 12300.0}. +> *
      • Subcase i < 0: +> * dv is formatted as +> * d1dn+i.dn+i+1dn. +> * There are exactly -i digits to the right of +> * the decimal point. +> * For example, 123 × 10-1 is formatted as +> * {@code 12.3}. +> *
      +> *
    • Case e < -3 or e ≥ 7: +> * computerized scientific notation is used to format +> * dv. +> * Here e is formatted as by {@link Integer#toString(int)}. +> *
        +> *
      • Subcase n = 1: +> * dv is formatted as +> * d1.0Ee. +> * For example, 1 × 1023 is formatted as +> * {@code 1.0E23}. +> *
      • Subcase n > 1: +> * dv is formatted as +> * d1.d2dnEe. +> * For example, 123 × 10-21 is formatted as +> * {@code 1.23E-19}. +> *
      +> *
    +> * +> * @param v the {@code float} to be rendered. +> * @return a string rendering of the argument. +135c249 +< return new FloatToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +151c265,269 +< return new FloatToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static FloatToDecimal threadLocalInstance() { +> return threadLocal.get(); +169,170c287 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +174c291 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +177c294 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +179,180c296,297 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +192,197c309,314 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +201,207c318,324 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +218c335 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +221c338 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +231c348 +< /* subnormal value */ +--- +> // subnormal value +246,261c363,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * Also check the appendix. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> Also check the appendix. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing0 +284c401 +< /* g is as in the appendix */ +--- +> // g is as in the appendix +294,301c411,418 +< * For n = 9, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 9, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +313,316c430,433 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +322c439 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +326,327c443,444 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +334,335c451,452 +< * Computes rop(cp g 2^(-95)) +< * See appendix and figure 11 of [1]. +--- +> Computes rop(cp g 2^(-95)) +> See appendix and figure 8 of [1]. +344c461 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +348,351c465,468 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +359,362c476,479 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +364c481 +< f *= (int)pow10(H - len); +--- +> f *= pow10(H - len); +368,375c485,492 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. +< * Therefore, split the H = 9 digits of f into: +< * h = the most significant digit of f +< * l = the last 8, least significant digits of f +< * +< * For n = 9, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 9 digits of f into: +> h = the most significant digit of f +> l = the last 8, least significant digits of f +> +> For n = 9, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +391,393c508,510 +< * 0 < e <= 7: plain format without leading zeroes. 
+< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +415c532 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +428c545 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +439,440c556,557 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +454c571 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +462,467c579,584 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +485,486c602,603 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +501c618 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. +507a625 +> diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..ed277c312b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort/diff_DoubleToDecimal.java.txt @@ -0,0 +1,22 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +38,49c34 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.*; +< import static java.lang.Long.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g0; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +52d36 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..3cc2b3daf2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort/diff_FloatToDecimal.java.txt @@ -0,0 +1,21 @@ 
+27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +37,47c33 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.*; +< import static java.lang.Integer.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +50d35 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_adjacent/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_adjacent/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..8d03f594b1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_adjacent/diff_DoubleToDecimal.java.txt @@ -0,0 +1,585 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. +< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. +--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. 
+--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +40a35,37 +> import static java.lang.Double.doubleToRawLongBits; +> import static java.lang.Long.numberOfLeadingZeros; +> +42a40,41 +> * +> * @author Raffaello Giulietti +46,57c45,56 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. +--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +60c59,61 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +63c64 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +66,67c67,68 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +69,70c70,71 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +72c73 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +75c76 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +78c79 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +81c82 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +85c86 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +88,89c89,90 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final long C_MIN = 1L << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). 
+> private static final long C_MIN = 1L << P - 1; +91c92 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +94,95c95,96 +< /* Mask to extract the fraction bits */ +< private static final long T_MASK = (1L << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final long T_MASK = (1L << P - 1) - 1; +97c98 +< /* Used in rop() */ +--- +> // Used in rop(). +100c101 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +103,108c104,113 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. +> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(DoubleToDecimal::new); +111,115c116,120 +< * Room for the longer of the forms +< * -ddddd.dddddddddddd H + 2 characters +< * -0.00ddddddddddddddddd H + 5 characters +< * -d.ddddddddddddddddE-eee H + 7 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddddddddddd H + 2 characters +> -0.00ddddddddddddddddd H + 5 characters +> -d.ddddddddddddddddE-eee H + 7 characters +> where there are H digits d +117c122 +< public static final int MAX_CHARS = H + 7; +--- +> public final int MAX_CHARS = H + 7; +118a124 +> // Numerical results are created here... +121c127,130 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into bytes of rightmost valid character. +128,129c137,189 +< * Returns a string representation of the {@code double} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code double} argument. +> * +> *

    The characters of the result are all drawn from the ASCII set. +> *

      +> *
    • Any NaN, whether quiet or signaling, is rendered as +> * {@code "NaN"}, regardless of the sign bit. +> *
    • The infinities +∞ and -∞ are rendered as +> * {@code "Infinity"} and {@code "-Infinity"}, respectively. +> *
    • The positive and negative zeroes are rendered as +> * {@code "0.0"} and {@code "-0.0"}, respectively. +> *
    • A finite negative {@code v} is rendered as the sign +> * '{@code -}' followed by the rendering of the magnitude -{@code v}. +> *
    • A finite positive {@code v} is rendered in two stages: +> *
        +> *
      • Selection of a decimal: A well-defined +> * decimal dv is selected +> * to represent {@code v}. +> *
      • Formatting as a string: The decimal +> * dv is formatted as a string, +> * either in plain or in computerized scientific notation, +> * depending on its value. +> *
      +> *
    +> * +> *

    A decimal is a number of the form +> * d×10i +> * for some (unique) integers d > 0 and i such that +> * d is not a multiple of 10. +> * These integers are the significand and +> * the exponent, respectively, of the decimal. +> * The length of the decimal is the (unique) +> * integer n meeting +> * 10n-1d < 10n. +> * +> *

    The decimal dv +> * for a finite positive {@code v} is defined as follows: +> *

      +> *
    • Let R be the set of all decimals that round to {@code v} +> * according to the usual round-to-closest rule of +> * IEEE 754 floating-point arithmetic. +> *
    • Let m be the minimal length over all decimals in R. +> *
    • When m ≥ 2, let T be the set of all decimals +> * in R with length m. +> * Otherwise, let T be the set of all decimals +> * in R with length 1 or 2. +> *
    • Define dv as +> * the decimal in T that is closest to {@code v}. +> * Or if there are two such decimals in T, +> * select the one with the even significand (there is exactly one). +> *
    +> * +> *

    The (uniquely) selected decimal dv +> * is then formatted. +131,133c191,247 +< * @param v the {@code double} to be converted. +< * @return a string representation of the argument. +< * @see Double#toString(double) +--- +> *

    Let d, i and n be the significand, exponent and +> * length of dv, respectively. +> * Further, let e = n + i - 1 and let +> * d1dn +> * be the usual decimal expansion of the significand. +> * Note that d1 ≠ 0 ≠ dn. +> *

      +> *
    • Case -3 ≤ e < 0: +> * dv is formatted as +> * 0.00d1dn, +> * where there are exactly -(n + i) zeroes between +> * the decimal point and d1. +> * For example, 123 × 10-4 is formatted as +> * {@code 0.0123}. +> *
    • Case 0 ≤ e < 7: +> *
        +> *
      • Subcase i ≥ 0: +> * dv is formatted as +> * d1dn00.0, +> * where there are exactly i zeroes +> * between dn and the decimal point. +> * For example, 123 × 102 is formatted as +> * {@code 12300.0}. +> *
      • Subcase i < 0: +> * dv is formatted as +> * d1dn+i.dn+i+1dn. +> * There are exactly -i digits to the right of +> * the decimal point. +> * For example, 123 × 10-1 is formatted as +> * {@code 12.3}. +> *
      +> *
    • Case e < -3 or e ≥ 7: +> * computerized scientific notation is used to format +> * dv. +> * Here e is formatted as by {@link Integer#toString(int)}. +> *
        +> *
      • Subcase n = 1: +> * dv is formatted as +> * d1.0Ee. +> * For example, 1 × 1023 is formatted as +> * {@code 1.0E23}. +> *
      • Subcase n > 1: +> * dv is formatted as +> * d1.d2dnEe. +> * For example, 123 × 10-21 is formatted as +> * {@code 1.23E-19}. +> *
      +> *
    +> * +> * @param v the {@code double} to be rendered. +> * @return a string rendering of the argument. +136c250 +< return new DoubleToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +152c266,270 +< return new DoubleToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static DoubleToDecimal threadLocalInstance() { +> return threadLocal.get(); +170,171c288 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +175c292 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +178c295 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +180,181c297,298 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +193,198c310,315 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +202,208c319,325 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +219c336 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +222c339 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +232c349 +< /* subnormal value */ +--- +> // subnormal value +247,261c364,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing +284c401 +< /* g1 and g0 are as in section 9.8.3 of [1], so g = g1 2^63 + g0 */ +--- +> // g1 and g0 are as in section 9.9.3 of [1], so g = g1 2^63 + g0 +295,303c412,420 +< * For n = 17, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +< * = floor(s 115_292_150_460_684_698 2^4 / 2^64) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 17, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +> = floor(s 115_292_150_460_684_698 2^4 / 2^64) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +315,318c432,435 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +324c441 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +328,329c445,446 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +336,337c453,454 +< * Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +< * See section 9.9 and figure 8 of [1]. +--- +> Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +> See section 9.10 and figure 5 of [1]. +349c466 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +353,356c470,473 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +364,367c481,484 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +373,384c490,501 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. 
+< * Therefore, split the H = 17 digits of f into: +< * h = the most significant digit of f +< * m = the next 8 most significant digits of f +< * l = the last 8, least significant digits of f +< * +< * For n = 17, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +< * floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +< * and for n = 9, m = 8 +< * floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 17 digits of f into: +> h = the most significant digit of f +> m = the next 8 most significant digits of f +> l = the last 8, least significant digits of f +> +> For n = 17, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +> floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +> and for n = 9, m = 8 +> floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +402,404c519,521 +< * 0 < e <= 7: plain format without leading zeroes. +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +426c543 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +439c556 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +457,458c574,575 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +472c589 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +480,485c597,602 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +505,506c622,623 +< * For n = 3, m = 2 the table in section 10 of [1] shows +< * floor(e / 100) = floor(1_311 e / 2^17) +--- +> For n = 3, m = 2 the table in section 10 of [1] shows +> floor(e / 100) = floor(1_311 e / 2^17) +513,514c630,631 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +529c646 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_adjacent/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_adjacent/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..846513122f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_adjacent/diff_FloatToDecimal.java.txt @@ -0,0 +1,577 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. 
+< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. +--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +39a34,36 +> import static java.lang.Float.floatToRawIntBits; +> import static java.lang.Integer.numberOfLeadingZeros; +> +41a39,40 +> * +> * @author Raffaello Giulietti +45,56c44,55 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. 
+--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +59c58,60 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +62c63 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +65,66c66,67 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +68,69c69,70 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +71c72 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +74c75 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +77c78 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +80c81 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +84c85 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +87,88c88,89 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final int C_MIN = 1 << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final int C_MIN = 1 << P - 1; +90c91 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +93,94c94,95 +< /* Mask to extract the fraction bits */ +< private static final int T_MASK = (1 << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final int T_MASK = (1 << P - 1) - 1; +96c97 +< /* Used in rop() */ +--- +> // Used in rop(). +99c100 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +102,107c103,112 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. 
+> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(FloatToDecimal::new); +110,114c115,119 +< * Room for the longer of the forms +< * -ddddd.dddd H + 2 characters +< * -0.00ddddddddd H + 5 characters +< * -d.ddddddddE-ee H + 6 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddd H + 2 characters +> -0.00ddddddddd H + 5 characters +> -d.ddddddddE-ee H + 6 characters +> where there are H digits d +116c121 +< public static final int MAX_CHARS = H + 6; +--- +> public final int MAX_CHARS = H + 6; +117a123 +> // Numerical results are created here... +120c126,129 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into buf of rightmost valid character. +127,128c136,168 +< * Returns a string representation of the {@code float} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code float} argument. +> * +> *

    The characters of the result are all drawn from the ASCII set. +> *

      +> *
    • Any NaN, whether quiet or signaling, is rendered as +> * {@code "NaN"}, regardless of the sign bit. +> *
    • The infinities +∞ and -∞ are rendered as +> * {@code "Infinity"} and {@code "-Infinity"}, respectively. +> *
    • The positive and negative zeroes are rendered as +> * {@code "0.0"} and {@code "-0.0"}, respectively. +> *
    • A finite negative {@code v} is rendered as the sign +> * '{@code -}' followed by the rendering of the magnitude -{@code v}. +> *
    • A finite positive {@code v} is rendered in two stages: +> *
        +> *
      • Selection of a decimal: A well-defined +> * decimal dv is selected +> * to represent {@code v}. +> *
      • Formatting as a string: The decimal +> * dv is formatted as a string, +> * either in plain or in computerized scientific notation, +> * depending on its value. +> *
      +> *
    +> * +> *

    A decimal is a number of the form +> * d×10i +> * for some (unique) integers d > 0 and i such that +> * d is not a multiple of 10. +> * These integers are the significand and +> * the exponent, respectively, of the decimal. +> * The length of the decimal is the (unique) +> * integer n meeting +> * 10n-1d < 10n. +130,132c170,246 +< * @param v the {@code float} to be converted. +< * @return a string representation of the argument. +< * @see Float#toString(float) +--- +> *

    The decimal dv +> * for a finite positive {@code v} is defined as follows: +> *

      +> *
    • Let R be the set of all decimals that round to {@code v} +> * according to the usual round-to-closest rule of +> * IEEE 754 floating-point arithmetic. +> *
    • Let m be the minimal length over all decimals in R. +> *
    • When m ≥ 2, let T be the set of all decimals +> * in R with length m. +> * Otherwise, let T be the set of all decimals +> * in R with length 1 or 2. +> *
    • Define dv as +> * the decimal in T that is closest to {@code v}. +> * Or if there are two such decimals in T, +> * select the one with the even significand (there is exactly one). +> *
    +> * +> *

    The (uniquely) selected decimal dv +> * is then formatted. +> * +> *

    Let d, i and n be the significand, exponent and +> * length of dv, respectively. +> * Further, let e = n + i - 1 and let +> * d1dn +> * be the usual decimal expansion of the significand. +> * Note that d1 ≠ 0 ≠ dn. +> *

      +> *
    • Case -3 ≤ e < 0: +> * dv is formatted as +> * 0.00d1dn, +> * where there are exactly -(n + i) zeroes between +> * the decimal point and d1. +> * For example, 123 × 10-4 is formatted as +> * {@code 0.0123}. +> *
    • Case 0 ≤ e < 7: +> *
        +> *
      • Subcase i ≥ 0: +> * dv is formatted as +> * d1dn00.0, +> * where there are exactly i zeroes +> * between dn and the decimal point. +> * For example, 123 × 102 is formatted as +> * {@code 12300.0}. +> *
      • Subcase i < 0: +> * dv is formatted as +> * d1dn+i.dn+i+1dn. +> * There are exactly -i digits to the right of +> * the decimal point. +> * For example, 123 × 10-1 is formatted as +> * {@code 12.3}. +> *
      +> *
    • Case e < -3 or e ≥ 7: +> * computerized scientific notation is used to format +> * dv. +> * Here e is formatted as by {@link Integer#toString(int)}. +> *
        +> *
      • Subcase n = 1: +> * dv is formatted as +> * d1.0Ee. +> * For example, 1 × 1023 is formatted as +> * {@code 1.0E23}. +> *
      • Subcase n > 1: +> * dv is formatted as +> * d1.d2dnEe. +> * For example, 123 × 10-21 is formatted as +> * {@code 1.23E-19}. +> *
      +> *
    +> * +> * @param v the {@code float} to be rendered. +> * @return a string rendering of the argument. +135c249 +< return new FloatToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +151c265,269 +< return new FloatToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static FloatToDecimal threadLocalInstance() { +> return threadLocal.get(); +169,170c287 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +174c291 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +177c294 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +179,180c296,297 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +192,197c309,314 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +201,207c318,324 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +218c335 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +221c338 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +231c348 +< /* subnormal value */ +--- +> // subnormal value +246,261c363,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * Also check the appendix. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> Also check the appendix. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing0 +284c401 +< /* g is as in the appendix */ +--- +> // g is as in the appendix +294,301c411,418 +< * For n = 9, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 9, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +313,316c430,433 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +322c439 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +326,327c443,444 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +334,335c451,452 +< * Computes rop(cp g 2^(-95)) +< * See appendix and figure 11 of [1]. +--- +> Computes rop(cp g 2^(-95)) +> See appendix and figure 8 of [1]. +344c461 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +348,351c465,468 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +359,362c476,479 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +364c481 +< f *= (int)pow10(H - len); +--- +> f *= pow10(H - len); +368,375c485,492 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. +< * Therefore, split the H = 9 digits of f into: +< * h = the most significant digit of f +< * l = the last 8, least significant digits of f +< * +< * For n = 9, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 9 digits of f into: +> h = the most significant digit of f +> l = the last 8, least significant digits of f +> +> For n = 9, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +391,393c508,510 +< * 0 < e <= 7: plain format without leading zeroes. 
+< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +415c532 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +428c545 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +439,440c556,557 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +454c571 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +462,467c579,584 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +485,486c602,603 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +501c618 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. +507a625 +> diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_ignorespace/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_ignorespace/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..318253f342 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_ignorespace/diff_DoubleToDecimal.java.txt @@ -0,0 +1,36 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +38,49c34 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.*; +< import static java.lang.Long.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g0; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +52d36 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +120,125c104,109 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> 
private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_ignorespace/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_ignorespace/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..03eb31fb3c --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_ignorespace/diff_FloatToDecimal.java.txt @@ -0,0 +1,35 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +37,47c33 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.*; +< import static java.lang.Integer.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +50d35 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +118,123c103,108 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..8d03f594b1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports/diff_DoubleToDecimal.java.txt @@ -0,0 +1,585 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. +< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. +--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. 
+--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +40a35,37 +> import static java.lang.Double.doubleToRawLongBits; +> import static java.lang.Long.numberOfLeadingZeros; +> +42a40,41 +> * +> * @author Raffaello Giulietti +46,57c45,56 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. +--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +60c59,61 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. 
+63c64 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +66,67c67,68 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +69,70c70,71 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +72c73 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +75c76 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +78c79 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +81c82 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +85c86 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +88,89c89,90 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final long C_MIN = 1L << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final long C_MIN = 1L << P - 1; +91c92 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +94,95c95,96 +< /* Mask to extract the fraction bits */ +< private static final long T_MASK = (1L << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final long T_MASK = (1L << P - 1) - 1; +97c98 +< /* Used in rop() */ +--- +> // Used in rop(). +100c101 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +103,108c104,113 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. +> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(DoubleToDecimal::new); +111,115c116,120 +< * Room for the longer of the forms +< * -ddddd.dddddddddddd H + 2 characters +< * -0.00ddddddddddddddddd H + 5 characters +< * -d.ddddddddddddddddE-eee H + 7 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddddddddddd H + 2 characters +> -0.00ddddddddddddddddd H + 5 characters +> -d.ddddddddddddddddE-eee H + 7 characters +> where there are H digits d +117c122 +< public static final int MAX_CHARS = H + 7; +--- +> public final int MAX_CHARS = H + 7; +118a124 +> // Numerical results are created here... +121c127,130 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into bytes of rightmost valid character. +128,129c137,189 +< * Returns a string representation of the {@code double} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code double} argument. 
+> *
+> * The characters of the result are all drawn from the ASCII set.
+> *
+> * • Any NaN, whether quiet or signaling, is rendered as
+> * {@code "NaN"}, regardless of the sign bit.
+> * • The infinities +∞ and -∞ are rendered as
+> * {@code "Infinity"} and {@code "-Infinity"}, respectively.
+> * • The positive and negative zeroes are rendered as
+> * {@code "0.0"} and {@code "-0.0"}, respectively.
+> * • A finite negative {@code v} is rendered as the sign
+> * '{@code -}' followed by the rendering of the magnitude -{@code v}.
+> * • A finite positive {@code v} is rendered in two stages:
+> *
+> * • Selection of a decimal: A well-defined
+> * decimal d_v is selected
+> * to represent {@code v}.
+> * • Formatting as a string: The decimal
+> * d_v is formatted as a string,
+> * either in plain or in computerized scientific notation,
+> * depending on its value.
+> *
+> *
+> *
+> * A decimal is a number of the form
+> * d × 10^i
+> * for some (unique) integers d > 0 and i such that
+> * d is not a multiple of 10.
+> * These integers are the significand and
+> * the exponent, respectively, of the decimal.
+> * The length of the decimal is the (unique)
+> * integer n meeting
+> * 10^(n-1) ≤ d < 10^n.
+> *
+> * The decimal d_v
+> * for a finite positive {@code v} is defined as follows:
+> *
+> * • Let R be the set of all decimals that round to {@code v}
+> * according to the usual round-to-closest rule of
+> * IEEE 754 floating-point arithmetic.
+> * • Let m be the minimal length over all decimals in R.
+> * • When m ≥ 2, let T be the set of all decimals
+> * in R with length m.
+> * Otherwise, let T be the set of all decimals
+> * in R with length 1 or 2.
+> * • Define d_v as
+> * the decimal in T that is closest to {@code v}.
+> * Or if there are two such decimals in T,
+> * select the one with the even significand (there is exactly one).
+> *
+> *
+> * The (uniquely) selected decimal d_v
+> * is then formatted.
+131,133c191,247
+< * @param v the {@code double} to be converted.
+< * @return a string representation of the argument.
+< * @see Double#toString(double)
+---
+> *
+> * Let d, i and n be the significand, exponent and
+> * length of d_v, respectively.
+> * Further, let e = n + i - 1 and let
+> * d_1...d_n
+> * be the usual decimal expansion of the significand.
+> * Note that d_1 ≠ 0 ≠ d_n.
+> *
+> * • Case -3 ≤ e < 0:
+> * d_v is formatted as
+> * 0.00d_1...d_n,
+> * where there are exactly -(n + i) zeroes between
+> * the decimal point and d_1.
+> * For example, 123 × 10^-4 is formatted as
+> * {@code 0.0123}.
+> * • Case 0 ≤ e < 7:
+> *
+> * • Subcase i ≥ 0:
+> * d_v is formatted as
+> * d_1...d_n0...0.0,
+> * where there are exactly i zeroes
+> * between d_n and the decimal point.
+> * For example, 123 × 10^2 is formatted as
+> * {@code 12300.0}.
+> * • Subcase i < 0:
+> * d_v is formatted as
+> * d_1...d_(n+i).d_(n+i+1)...d_n.
+> * There are exactly -i digits to the right of
+> * the decimal point.
+> * For example, 123 × 10^-1 is formatted as
+> * {@code 12.3}.
+> *
+> * • Case e < -3 or e ≥ 7:
+> * computerized scientific notation is used to format
+> * d_v.
+> * Here e is formatted as by {@link Integer#toString(int)}.
+> *
+> * • Subcase n = 1:
+> * d_v is formatted as
+> * d_1.0Ee.
+> * For example, 1 × 10^23 is formatted as
+> * {@code 1.0E23}.
+> * • Subcase n > 1:
+> * d_v is formatted as
+> * d_1.d_2...d_nEe.
+> * For example, 123 × 10^-21 is formatted as
+> * {@code 1.23E-19}.
+> *
+> *
    +> * +> * @param v the {@code double} to be rendered. +> * @return a string rendering of the argument. +136c250 +< return new DoubleToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +152c266,270 +< return new DoubleToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static DoubleToDecimal threadLocalInstance() { +> return threadLocal.get(); +170,171c288 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +175c292 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +178c295 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +180,181c297,298 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +193,198c310,315 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +202,208c319,325 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +219c336 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +222c339 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +232c349 +< /* subnormal value */ +--- +> // subnormal value +247,261c364,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing +284c401 +< /* g1 and g0 are as in section 9.8.3 of [1], so g = g1 2^63 + g0 */ +--- +> // g1 and g0 are as in section 9.9.3 of [1], so g = g1 2^63 + g0 +295,303c412,420 +< * For n = 17, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +< * = floor(s 115_292_150_460_684_698 2^4 / 2^64) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 17, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +> = floor(s 115_292_150_460_684_698 2^4 / 2^64) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +315,318c432,435 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +324c441 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +328,329c445,446 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +336,337c453,454 +< * Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +< * See section 9.9 and figure 8 of [1]. +--- +> Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +> See section 9.10 and figure 5 of [1]. +349c466 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +353,356c470,473 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +364,367c481,484 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +373,384c490,501 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. 
+< * Therefore, split the H = 17 digits of f into: +< * h = the most significant digit of f +< * m = the next 8 most significant digits of f +< * l = the last 8, least significant digits of f +< * +< * For n = 17, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +< * floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +< * and for n = 9, m = 8 +< * floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 17 digits of f into: +> h = the most significant digit of f +> m = the next 8 most significant digits of f +> l = the last 8, least significant digits of f +> +> For n = 17, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +> floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +> and for n = 9, m = 8 +> floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +402,404c519,521 +< * 0 < e <= 7: plain format without leading zeroes. +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +426c543 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +439c556 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +457,458c574,575 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +472c589 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +480,485c597,602 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +505,506c622,623 +< * For n = 3, m = 2 the table in section 10 of [1] shows +< * floor(e / 100) = floor(1_311 e / 2^17) +--- +> For n = 3, m = 2 the table in section 10 of [1] shows +> floor(e / 100) = floor(1_311 e / 2^17) +513,514c630,631 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +529c646 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..846513122f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports/diff_FloatToDecimal.java.txt @@ -0,0 +1,577 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. 
+< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. +--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +39a34,36 +> import static java.lang.Float.floatToRawIntBits; +> import static java.lang.Integer.numberOfLeadingZeros; +> +41a39,40 +> * +> * @author Raffaello Giulietti +45,56c44,55 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. 
+--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +59c58,60 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +62c63 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +65,66c66,67 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +68,69c69,70 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +71c72 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +74c75 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +77c78 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +80c81 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +84c85 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +87,88c88,89 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final int C_MIN = 1 << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final int C_MIN = 1 << P - 1; +90c91 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +93,94c94,95 +< /* Mask to extract the fraction bits */ +< private static final int T_MASK = (1 << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final int T_MASK = (1 << P - 1) - 1; +96c97 +< /* Used in rop() */ +--- +> // Used in rop(). +99c100 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +102,107c103,112 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. 
+> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(FloatToDecimal::new); +110,114c115,119 +< * Room for the longer of the forms +< * -ddddd.dddd H + 2 characters +< * -0.00ddddddddd H + 5 characters +< * -d.ddddddddE-ee H + 6 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddd H + 2 characters +> -0.00ddddddddd H + 5 characters +> -d.ddddddddE-ee H + 6 characters +> where there are H digits d +116c121 +< public static final int MAX_CHARS = H + 6; +--- +> public final int MAX_CHARS = H + 6; +117a123 +> // Numerical results are created here... +120c126,129 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into buf of rightmost valid character. +127,128c136,168 +< * Returns a string representation of the {@code float} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code float} argument. +> * +> *

+> * The characters of the result are all drawn from the ASCII set.
+> *
+> * • Any NaN, whether quiet or signaling, is rendered as
+> * {@code "NaN"}, regardless of the sign bit.
+> * • The infinities +∞ and -∞ are rendered as
+> * {@code "Infinity"} and {@code "-Infinity"}, respectively.
+> * • The positive and negative zeroes are rendered as
+> * {@code "0.0"} and {@code "-0.0"}, respectively.
+> * • A finite negative {@code v} is rendered as the sign
+> * '{@code -}' followed by the rendering of the magnitude -{@code v}.
+> * • A finite positive {@code v} is rendered in two stages:
+> *
+> * • Selection of a decimal: A well-defined
+> * decimal d_v is selected
+> * to represent {@code v}.
+> * • Formatting as a string: The decimal
+> * d_v is formatted as a string,
+> * either in plain or in computerized scientific notation,
+> * depending on its value.
+> *
+> *
+> *
+> * A decimal is a number of the form
+> * d × 10^i
+> * for some (unique) integers d > 0 and i such that
+> * d is not a multiple of 10.
+> * These integers are the significand and
+> * the exponent, respectively, of the decimal.
+> * The length of the decimal is the (unique)
+> * integer n meeting
+> * 10^(n-1) ≤ d < 10^n.
+130,132c170,246
+< * @param v the {@code float} to be converted.
+< * @return a string representation of the argument.
+< * @see Float#toString(float)
+---
+> *
+> * The decimal d_v
+> * for a finite positive {@code v} is defined as follows:
+> *
+> * • Let R be the set of all decimals that round to {@code v}
+> * according to the usual round-to-closest rule of
+> * IEEE 754 floating-point arithmetic.
+> * • Let m be the minimal length over all decimals in R.
+> * • When m ≥ 2, let T be the set of all decimals
+> * in R with length m.
+> * Otherwise, let T be the set of all decimals
+> * in R with length 1 or 2.
+> * • Define d_v as
+> * the decimal in T that is closest to {@code v}.
+> * Or if there are two such decimals in T,
+> * select the one with the even significand (there is exactly one).
+> *
+> *
+> * The (uniquely) selected decimal d_v
+> * is then formatted.
+> *
+> * Let d, i and n be the significand, exponent and
+> * length of d_v, respectively.
+> * Further, let e = n + i - 1 and let
+> * d_1...d_n
+> * be the usual decimal expansion of the significand.
+> * Note that d_1 ≠ 0 ≠ d_n.
+> *
+> * • Case -3 ≤ e < 0:
+> * d_v is formatted as
+> * 0.00d_1...d_n,
+> * where there are exactly -(n + i) zeroes between
+> * the decimal point and d_1.
+> * For example, 123 × 10^-4 is formatted as
+> * {@code 0.0123}.
+> * • Case 0 ≤ e < 7:
+> *
+> * • Subcase i ≥ 0:
+> * d_v is formatted as
+> * d_1...d_n0...0.0,
+> * where there are exactly i zeroes
+> * between d_n and the decimal point.
+> * For example, 123 × 10^2 is formatted as
+> * {@code 12300.0}.
+> * • Subcase i < 0:
+> * d_v is formatted as
+> * d_1...d_(n+i).d_(n+i+1)...d_n.
+> * There are exactly -i digits to the right of
+> * the decimal point.
+> * For example, 123 × 10^-1 is formatted as
+> * {@code 12.3}.
+> *
+> * • Case e < -3 or e ≥ 7:
+> * computerized scientific notation is used to format
+> * d_v.
+> * Here e is formatted as by {@link Integer#toString(int)}.
+> *
+> * • Subcase n = 1:
+> * d_v is formatted as
+> * d_1.0Ee.
+> * For example, 1 × 10^23 is formatted as
+> * {@code 1.0E23}.
+> * • Subcase n > 1:
+> * d_v is formatted as
+> * d_1.d_2...d_nEe.
+> * For example, 123 × 10^-21 is formatted as
+> * {@code 1.23E-19}.
+> *
+> *
    +> * +> * @param v the {@code float} to be rendered. +> * @return a string rendering of the argument. +135c249 +< return new FloatToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +151c265,269 +< return new FloatToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static FloatToDecimal threadLocalInstance() { +> return threadLocal.get(); +169,170c287 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +174c291 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +177c294 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +179,180c296,297 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +192,197c309,314 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +201,207c318,324 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +218c335 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +221c338 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +231c348 +< /* subnormal value */ +--- +> // subnormal value +246,261c363,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * Also check the appendix. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> Also check the appendix. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing0 +284c401 +< /* g is as in the appendix */ +--- +> // g is as in the appendix +294,301c411,418 +< * For n = 9, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 9, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +313,316c430,433 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +322c439 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +326,327c443,444 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +334,335c451,452 +< * Computes rop(cp g 2^(-95)) +< * See appendix and figure 11 of [1]. +--- +> Computes rop(cp g 2^(-95)) +> See appendix and figure 8 of [1]. +344c461 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +348,351c465,468 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +359,362c476,479 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +364c481 +< f *= (int)pow10(H - len); +--- +> f *= pow10(H - len); +368,375c485,492 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. +< * Therefore, split the H = 9 digits of f into: +< * h = the most significant digit of f +< * l = the last 8, least significant digits of f +< * +< * For n = 9, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 9 digits of f into: +> h = the most significant digit of f +> l = the last 8, least significant digits of f +> +> For n = 9, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +391,393c508,510 +< * 0 < e <= 7: plain format without leading zeroes. 
+< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +415c532 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +428c545 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +439,440c556,557 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +454c571 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +462,467c579,584 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +485,486c602,603 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +501c618 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. +507a625 +> diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports_ignorespace/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports_ignorespace/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..8d03f594b1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports_ignorespace/diff_DoubleToDecimal.java.txt @@ -0,0 +1,585 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. +< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. +--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). 
+--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +40a35,37 +> import static java.lang.Double.doubleToRawLongBits; +> import static java.lang.Long.numberOfLeadingZeros; +> +42a40,41 +> * +> * @author Raffaello Giulietti +46,57c45,56 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. +--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +60c59,61 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +63c64 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +66,67c67,68 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +69,70c70,71 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. 
+> static final int Q_MAX = (1 << W - 1) - P; +72c73 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +75c76 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +78c79 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +81c82 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +85c86 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +88,89c89,90 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final long C_MIN = 1L << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final long C_MIN = 1L << P - 1; +91c92 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +94,95c95,96 +< /* Mask to extract the fraction bits */ +< private static final long T_MASK = (1L << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final long T_MASK = (1L << P - 1) - 1; +97c98 +< /* Used in rop() */ +--- +> // Used in rop(). +100c101 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +103,108c104,113 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. +> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(DoubleToDecimal::new); +111,115c116,120 +< * Room for the longer of the forms +< * -ddddd.dddddddddddd H + 2 characters +< * -0.00ddddddddddddddddd H + 5 characters +< * -d.ddddddddddddddddE-eee H + 7 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddddddddddd H + 2 characters +> -0.00ddddddddddddddddd H + 5 characters +> -d.ddddddddddddddddE-eee H + 7 characters +> where there are H digits d +117c122 +< public static final int MAX_CHARS = H + 7; +--- +> public final int MAX_CHARS = H + 7; +118a124 +> // Numerical results are created here... +121c127,130 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into bytes of rightmost valid character. +128,129c137,189 +< * Returns a string representation of the {@code double} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code double} argument. +> * +> *

+> * The characters of the result are all drawn from the ASCII set.
+> *
+> * • Any NaN, whether quiet or signaling, is rendered as
+> * {@code "NaN"}, regardless of the sign bit.
+> * • The infinities +∞ and -∞ are rendered as
+> * {@code "Infinity"} and {@code "-Infinity"}, respectively.
+> * • The positive and negative zeroes are rendered as
+> * {@code "0.0"} and {@code "-0.0"}, respectively.
+> * • A finite negative {@code v} is rendered as the sign
+> * '{@code -}' followed by the rendering of the magnitude -{@code v}.
+> * • A finite positive {@code v} is rendered in two stages:
+> *
+> * • Selection of a decimal: A well-defined
+> * decimal d_v is selected
+> * to represent {@code v}.
+> * • Formatting as a string: The decimal
+> * d_v is formatted as a string,
+> * either in plain or in computerized scientific notation,
+> * depending on its value.
+> *
+> *
+> *
+> * A decimal is a number of the form
+> * d × 10^i
+> * for some (unique) integers d > 0 and i such that
+> * d is not a multiple of 10.
+> * These integers are the significand and
+> * the exponent, respectively, of the decimal.
+> * The length of the decimal is the (unique)
+> * integer n meeting
+> * 10^(n-1) ≤ d < 10^n.
+> *
+> * The decimal d_v
+> * for a finite positive {@code v} is defined as follows:
+> *
+> * • Let R be the set of all decimals that round to {@code v}
+> * according to the usual round-to-closest rule of
+> * IEEE 754 floating-point arithmetic.
+> * • Let m be the minimal length over all decimals in R.
+> * • When m ≥ 2, let T be the set of all decimals
+> * in R with length m.
+> * Otherwise, let T be the set of all decimals
+> * in R with length 1 or 2.
+> * • Define d_v as
+> * the decimal in T that is closest to {@code v}.
+> * Or if there are two such decimals in T,
+> * select the one with the even significand (there is exactly one).
+> *
+> *
+> * The (uniquely) selected decimal d_v
+> * is then formatted.
+131,133c191,247
+< * @param v the {@code double} to be converted.
+< * @return a string representation of the argument.
+< * @see Double#toString(double)
+---
+> *
+> * Let d, i and n be the significand, exponent and
+> * length of d_v, respectively.
+> * Further, let e = n + i - 1 and let
+> * d_1...d_n
+> * be the usual decimal expansion of the significand.
+> * Note that d_1 ≠ 0 ≠ d_n.
+> *
+> * • Case -3 ≤ e < 0:
+> * d_v is formatted as
+> * 0.00d_1...d_n,
+> * where there are exactly -(n + i) zeroes between
+> * the decimal point and d_1.
+> * For example, 123 × 10^-4 is formatted as
+> * {@code 0.0123}.
+> * • Case 0 ≤ e < 7:
+> *
+> * • Subcase i ≥ 0:
+> * d_v is formatted as
+> * d_1...d_n0...0.0,
+> * where there are exactly i zeroes
+> * between d_n and the decimal point.
+> * For example, 123 × 10^2 is formatted as
+> * {@code 12300.0}.
+> * • Subcase i < 0:
+> * d_v is formatted as
+> * d_1...d_(n+i).d_(n+i+1)...d_n.
+> * There are exactly -i digits to the right of
+> * the decimal point.
+> * For example, 123 × 10^-1 is formatted as
+> * {@code 12.3}.
+> *
+> * • Case e < -3 or e ≥ 7:
+> * computerized scientific notation is used to format
+> * d_v.
+> * Here e is formatted as by {@link Integer#toString(int)}.
+> *
+> * • Subcase n = 1:
+> * d_v is formatted as
+> * d_1.0Ee.
+> * For example, 1 × 10^23 is formatted as
+> * {@code 1.0E23}.
+> * • Subcase n > 1:
+> * d_v is formatted as
+> * d_1.d_2...d_nEe.
+> * For example, 123 × 10^-21 is formatted as
+> * {@code 1.23E-19}.
+> *
+> *
    +> * +> * @param v the {@code double} to be rendered. +> * @return a string rendering of the argument. +136c250 +< return new DoubleToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +152c266,270 +< return new DoubleToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static DoubleToDecimal threadLocalInstance() { +> return threadLocal.get(); +170,171c288 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +175c292 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +178c295 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +180,181c297,298 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +193,198c310,315 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +202,208c319,325 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +219c336 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +222c339 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +232c349 +< /* subnormal value */ +--- +> // subnormal value +247,261c364,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing +284c401 +< /* g1 and g0 are as in section 9.8.3 of [1], so g = g1 2^63 + g0 */ +--- +> // g1 and g0 are as in section 9.9.3 of [1], so g = g1 2^63 + g0 +295,303c412,420 +< * For n = 17, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +< * = floor(s 115_292_150_460_684_698 2^4 / 2^64) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 17, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +> = floor(s 115_292_150_460_684_698 2^4 / 2^64) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +315,318c432,435 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +324c441 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +328,329c445,446 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +336,337c453,454 +< * Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +< * See section 9.9 and figure 8 of [1]. +--- +> Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +> See section 9.10 and figure 5 of [1]. +349c466 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +353,356c470,473 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +364,367c481,484 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +373,384c490,501 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. 
+< * Therefore, split the H = 17 digits of f into: +< * h = the most significant digit of f +< * m = the next 8 most significant digits of f +< * l = the last 8, least significant digits of f +< * +< * For n = 17, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +< * floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +< * and for n = 9, m = 8 +< * floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 17 digits of f into: +> h = the most significant digit of f +> m = the next 8 most significant digits of f +> l = the last 8, least significant digits of f +> +> For n = 17, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +> floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +> and for n = 9, m = 8 +> floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +402,404c519,521 +< * 0 < e <= 7: plain format without leading zeroes. +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +426c543 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +439c556 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +457,458c574,575 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +472c589 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +480,485c597,602 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +505,506c622,623 +< * For n = 3, m = 2 the table in section 10 of [1] shows +< * floor(e / 100) = floor(1_311 e / 2^17) +--- +> For n = 3, m = 2 the table in section 10 of [1] shows +> floor(e / 100) = floor(1_311 e / 2^17) +513,514c630,631 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +529c646 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports_ignorespace/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports_ignorespace/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..846513122f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_ort_imports_ignorespace/diff_FloatToDecimal.java.txt @@ -0,0 +1,577 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. 
+< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. +--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +39a34,36 +> import static java.lang.Float.floatToRawIntBits; +> import static java.lang.Integer.numberOfLeadingZeros; +> +41a39,40 +> * +> * @author Raffaello Giulietti +45,56c44,55 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. 
+--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +59c58,60 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +62c63 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +65,66c66,67 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +68,69c69,70 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +71c72 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +74c75 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +77c78 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +80c81 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +84c85 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +87,88c88,89 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final int C_MIN = 1 << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final int C_MIN = 1 << P - 1; +90c91 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +93,94c94,95 +< /* Mask to extract the fraction bits */ +< private static final int T_MASK = (1 << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final int T_MASK = (1 << P - 1) - 1; +96c97 +< /* Used in rop() */ +--- +> // Used in rop(). +99c100 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +102,107c103,112 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. 
+> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(FloatToDecimal::new); +110,114c115,119 +< * Room for the longer of the forms +< * -ddddd.dddd H + 2 characters +< * -0.00ddddddddd H + 5 characters +< * -d.ddddddddE-ee H + 6 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddd H + 2 characters +> -0.00ddddddddd H + 5 characters +> -d.ddddddddE-ee H + 6 characters +> where there are H digits d +116c121 +< public static final int MAX_CHARS = H + 6; +--- +> public final int MAX_CHARS = H + 6; +117a123 +> // Numerical results are created here... +120c126,129 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into buf of rightmost valid character. +127,128c136,168 +< * Returns a string representation of the {@code float} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code float} argument. +> * +> *

+> * The characters of the result are all drawn from the ASCII set.
+> *   • Any NaN, whether quiet or signaling, is rendered as
+> *     {@code "NaN"}, regardless of the sign bit.
+> *   • The infinities +∞ and -∞ are rendered as
+> *     {@code "Infinity"} and {@code "-Infinity"}, respectively.
+> *   • The positive and negative zeroes are rendered as
+> *     {@code "0.0"} and {@code "-0.0"}, respectively.
+> *   • A finite negative {@code v} is rendered as the sign
+> *     '{@code -}' followed by the rendering of the magnitude -{@code v}.
+> *   • A finite positive {@code v} is rendered in two stages:
+> *     • Selection of a decimal: A well-defined decimal d_v is selected
+> *       to represent {@code v}.
+> *     • Formatting as a string: The decimal d_v is formatted as a string,
+> *       either in plain or in computerized scientific notation,
+> *       depending on its value.
+> *
+> * A decimal is a number of the form d×10^i
+> * for some (unique) integers d > 0 and i such that
+> * d is not a multiple of 10.
+> * These integers are the significand and
+> * the exponent, respectively, of the decimal.
+> * The length of the decimal is the (unique)
+> * integer n meeting 10^(n-1) ≤ d < 10^n.
+130,132c170,246
+< * @param v the {@code float} to be converted.
+< * @return a string representation of the argument.
+< * @see Float#toString(float)
+---
+> * The decimal d_v for a finite positive {@code v} is defined as follows:
+> *   • Let R be the set of all decimals that round to {@code v}
+> *     according to the usual round-to-closest rule of
+> *     IEEE 754 floating-point arithmetic.
+> *   • Let m be the minimal length over all decimals in R.
+> *   • When m ≥ 2, let T be the set of all decimals
+> *     in R with length m.
+> *     Otherwise, let T be the set of all decimals
+> *     in R with length 1 or 2.
+> *   • Define d_v as the decimal in T that is closest to {@code v}.
+> *     Or if there are two such decimals in T,
+> *     select the one with the even significand (there is exactly one).
+> *
+> * The (uniquely) selected decimal d_v is then formatted.
+> *
+> * Let d, i and n be the significand, exponent and
+> * length of d_v, respectively.
+> * Further, let e = n + i - 1 and let d_1...d_n
+> * be the usual decimal expansion of the significand.
+> * Note that d_1 ≠ 0 ≠ d_n.
+> *   • Case -3 ≤ e < 0:
+> *     d_v is formatted as 0.0...0d_1...d_n,
+> *     where there are exactly -(n + i) zeroes between
+> *     the decimal point and d_1.
+> *     For example, 123 × 10^-4 is formatted as {@code 0.0123}.
+> *   • Case 0 ≤ e < 7:
+> *     • Subcase i ≥ 0:
+> *       d_v is formatted as d_1...d_n0...0.0,
+> *       where there are exactly i zeroes
+> *       between d_n and the decimal point.
+> *       For example, 123 × 10^2 is formatted as {@code 12300.0}.
+> *     • Subcase i < 0:
+> *       d_v is formatted as d_1...d_(n+i).d_(n+i+1)...d_n.
+> *       There are exactly -i digits to the right of the decimal point.
+> *       For example, 123 × 10^-1 is formatted as {@code 12.3}.
+> *   • Case e < -3 or e ≥ 7:
+> *     computerized scientific notation is used to format d_v.
+> *     Here e is formatted as by {@link Integer#toString(int)}.
+> *     • Subcase n = 1:
+> *       d_v is formatted as d_1.0Ee.
+> *       For example, 1 × 10^23 is formatted as {@code 1.0E23}.
+> *     • Subcase n > 1:
+> *       d_v is formatted as d_1.d_2...d_nEe.
+> *       For example, 123 × 10^-21 is formatted as {@code 1.23E-19}.
    +> * +> * @param v the {@code float} to be rendered. +> * @return a string rendering of the argument. +135c249 +< return new FloatToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +151c265,269 +< return new FloatToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static FloatToDecimal threadLocalInstance() { +> return threadLocal.get(); +169,170c287 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +174c291 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +177c294 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +179,180c296,297 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +192,197c309,314 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +201,207c318,324 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +218c335 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +221c338 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +231c348 +< /* subnormal value */ +--- +> // subnormal value +246,261c363,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * Also check the appendix. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> Also check the appendix. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing0 +284c401 +< /* g is as in the appendix */ +--- +> // g is as in the appendix +294,301c411,418 +< * For n = 9, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 9, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +313,316c430,433 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +322c439 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +326,327c443,444 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +334,335c451,452 +< * Computes rop(cp g 2^(-95)) +< * See appendix and figure 11 of [1]. +--- +> Computes rop(cp g 2^(-95)) +> See appendix and figure 8 of [1]. +344c461 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +348,351c465,468 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +359,362c476,479 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +364c481 +< f *= (int)pow10(H - len); +--- +> f *= pow10(H - len); +368,375c485,492 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. +< * Therefore, split the H = 9 digits of f into: +< * h = the most significant digit of f +< * l = the last 8, least significant digits of f +< * +< * For n = 9, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 9 digits of f into: +> h = the most significant digit of f +> l = the last 8, least significant digits of f +> +> For n = 9, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +391,393c508,510 +< * 0 < e <= 7: plain format without leading zeroes. 
+< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +415c532 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +428c545 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +439,440c556,557 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +454c571 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +462,467c579,584 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +485,486c602,603 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +501c618 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. +507a625 +> diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_histogram/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_histogram/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..ed277c312b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_histogram/diff_DoubleToDecimal.java.txt @@ -0,0 +1,22 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +38,49c34 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.*; +< import static java.lang.Long.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g0; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +52d36 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_histogram/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_histogram/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..3cc2b3daf2 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_histogram/diff_FloatToDecimal.java.txt @@ -0,0 +1,21 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +37,47c33 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.*; +< import static java.lang.Integer.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +50d35 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_ignorespace/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_ignorespace/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..318253f342 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_ignorespace/diff_DoubleToDecimal.java.txt @@ -0,0 +1,36 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +38,49c34 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.*; +< import static java.lang.Long.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g0; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +52d36 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +120,125c104,109 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_ignorespace/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_ignorespace/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..03eb31fb3c --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_ignorespace/diff_FloatToDecimal.java.txt @@ -0,0 +1,35 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +37,47c33 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.*; +< import static java.lang.Integer.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +50d35 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +118,123c103,108 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_minimal/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_minimal/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..ed277c312b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_minimal/diff_DoubleToDecimal.java.txt @@ -0,0 +1,22 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +38,49c34 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.*; +< import static java.lang.Long.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g0; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +52d36 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_minimal/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_minimal/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..3cc2b3daf2 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_minimal/diff_FloatToDecimal.java.txt @@ -0,0 +1,21 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +37,47c33 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.*; +< import static java.lang.Integer.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +50d35 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_myers/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_myers/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..ed277c312b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_myers/diff_DoubleToDecimal.java.txt @@ -0,0 +1,22 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +38,49c34 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.*; +< import static java.lang.Long.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g0; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +52d36 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_myers/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_myers/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..3cc2b3daf2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_myers/diff_FloatToDecimal.java.txt @@ -0,0 +1,21 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +37,47c33 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.*; +< import static java.lang.Integer.*; +< import static 
com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +50d35 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_patience/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_patience/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..ed277c312b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_patience/diff_DoubleToDecimal.java.txt @@ -0,0 +1,22 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +38,49c34 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.*; +< import static java.lang.Long.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g0; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +52d36 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_patience/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_patience/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..3cc2b3daf2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/gitmerge_recursive_patience/diff_FloatToDecimal.java.txt @@ -0,0 +1,21 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +37,47c33 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.*; +< import static java.lang.Integer.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> 
+50d35 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/intellimerge/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/intellimerge/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..ed277c312b --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/intellimerge/diff_DoubleToDecimal.java.txt @@ -0,0 +1,22 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +38,49c34 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java +< import static java.lang.Double.*; +< import static java.lang.Long.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g0; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +52d36 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/DoubleToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/intellimerge/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/intellimerge/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..3cc2b3daf2 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/intellimerge/diff_FloatToDecimal.java.txt @@ -0,0 +1,21 @@ +27,30d26 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +37,47c33 +< ||||||| d63cef092:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java +< import static java.lang.Float.*; +< import static java.lang.Integer.*; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.*; +< ======= +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10pow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog10threeQuartersPow2; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.flog2pow10; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.g1; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.multiplyHigh; +< import static com.fasterxml.jackson.core.io.schubfach.MathUtils.pow10; +--- +> +50d35 +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/io/schubfach/FloatToDecimal.java diff --git a/src/python/merge_conflict_analysis_diffs/845/spork/diff_DoubleToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/spork/diff_DoubleToDecimal.java.txt new file mode 100644 index 0000000000..8d03f594b1 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/spork/diff_DoubleToDecimal.java.txt @@ -0,0 +1,585 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. +< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
+--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Double.doubleToRawLongBits; +< import static java.lang.Long.numberOfLeadingZeros; +< +40a35,37 +> import static java.lang.Double.doubleToRawLongBits; +> import static java.lang.Long.numberOfLeadingZeros; +> +42a40,41 +> * +> * @author Raffaello Giulietti +46,57c45,56 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. 
+--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +60c59,61 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +63c64 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +66,67c67,68 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +69,70c70,71 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +72c73 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +75c76 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +78c79 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +81c82 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +85c86 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +88,89c89,90 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final long C_MIN = 1L << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final long C_MIN = 1L << P - 1; +91c92 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +94,95c95,96 +< /* Mask to extract the fraction bits */ +< private static final long T_MASK = (1L << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final long T_MASK = (1L << P - 1) - 1; +97c98 +< /* Used in rop() */ +--- +> // Used in rop(). +100c101 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +103,108c104,113 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. 
+> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(DoubleToDecimal::new); +111,115c116,120 +< * Room for the longer of the forms +< * -ddddd.dddddddddddd H + 2 characters +< * -0.00ddddddddddddddddd H + 5 characters +< * -d.ddddddddddddddddE-eee H + 7 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddddddddddd H + 2 characters +> -0.00ddddddddddddddddd H + 5 characters +> -d.ddddddddddddddddE-eee H + 7 characters +> where there are H digits d +117c122 +< public static final int MAX_CHARS = H + 7; +--- +> public final int MAX_CHARS = H + 7; +118a124 +> // Numerical results are created here... +121c127,130 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into bytes of rightmost valid character. +128,129c137,189 +< * Returns a string representation of the {@code double} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code double} argument. +> * +> *

+> * The characters of the result are all drawn from the ASCII set.
+> *   • Any NaN, whether quiet or signaling, is rendered as
+> *     {@code "NaN"}, regardless of the sign bit.
+> *   • The infinities +∞ and -∞ are rendered as
+> *     {@code "Infinity"} and {@code "-Infinity"}, respectively.
+> *   • The positive and negative zeroes are rendered as
+> *     {@code "0.0"} and {@code "-0.0"}, respectively.
+> *   • A finite negative {@code v} is rendered as the sign
+> *     '{@code -}' followed by the rendering of the magnitude -{@code v}.
+> *   • A finite positive {@code v} is rendered in two stages:
+> *     • Selection of a decimal: A well-defined decimal d_v is selected
+> *       to represent {@code v}.
+> *     • Formatting as a string: The decimal d_v is formatted as a string,
+> *       either in plain or in computerized scientific notation,
+> *       depending on its value.
+> *
+> * A decimal is a number of the form d×10^i
+> * for some (unique) integers d > 0 and i such that
+> * d is not a multiple of 10.
+> * These integers are the significand and
+> * the exponent, respectively, of the decimal.
+> * The length of the decimal is the (unique)
+> * integer n meeting 10^(n-1) ≤ d < 10^n.
+> *
+> * The decimal d_v for a finite positive {@code v} is defined as follows:
+> *   • Let R be the set of all decimals that round to {@code v}
+> *     according to the usual round-to-closest rule of
+> *     IEEE 754 floating-point arithmetic.
+> *   • Let m be the minimal length over all decimals in R.
+> *   • When m ≥ 2, let T be the set of all decimals
+> *     in R with length m.
+> *     Otherwise, let T be the set of all decimals
+> *     in R with length 1 or 2.
+> *   • Define d_v as the decimal in T that is closest to {@code v}.
+> *     Or if there are two such decimals in T,
+> *     select the one with the even significand (there is exactly one).
+> *
+> * The (uniquely) selected decimal d_v is then formatted.
+131,133c191,247
+< * @param v the {@code double} to be converted.
+< * @return a string representation of the argument.
+< * @see Double#toString(double)
+---
+> * Let d, i and n be the significand, exponent and
+> * length of d_v, respectively.
+> * Further, let e = n + i - 1 and let d_1...d_n
+> * be the usual decimal expansion of the significand.
+> * Note that d_1 ≠ 0 ≠ d_n.
+> *   • Case -3 ≤ e < 0:
+> *     d_v is formatted as 0.0...0d_1...d_n,
+> *     where there are exactly -(n + i) zeroes between
+> *     the decimal point and d_1.
+> *     For example, 123 × 10^-4 is formatted as {@code 0.0123}.
+> *   • Case 0 ≤ e < 7:
+> *     • Subcase i ≥ 0:
+> *       d_v is formatted as d_1...d_n0...0.0,
+> *       where there are exactly i zeroes
+> *       between d_n and the decimal point.
+> *       For example, 123 × 10^2 is formatted as {@code 12300.0}.
+> *     • Subcase i < 0:
+> *       d_v is formatted as d_1...d_(n+i).d_(n+i+1)...d_n.
+> *       There are exactly -i digits to the right of the decimal point.
+> *       For example, 123 × 10^-1 is formatted as {@code 12.3}.
+> *   • Case e < -3 or e ≥ 7:
+> *     computerized scientific notation is used to format d_v.
+> *     Here e is formatted as by {@link Integer#toString(int)}.
+> *     • Subcase n = 1:
+> *       d_v is formatted as d_1.0Ee.
+> *       For example, 1 × 10^23 is formatted as {@code 1.0E23}.
+> *     • Subcase n > 1:
+> *       d_v is formatted as d_1.d_2...d_nEe.
+> *       For example, 123 × 10^-21 is formatted as {@code 1.23E-19}.
    +> * +> * @param v the {@code double} to be rendered. +> * @return a string rendering of the argument. +136c250 +< return new DoubleToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +152c266,270 +< return new DoubleToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static DoubleToDecimal threadLocalInstance() { +> return threadLocal.get(); +170,171c288 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +175c292 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +178c295 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +180,181c297,298 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +193,198c310,315 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +202,208c319,325 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +219c336 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +222c339 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +232c349 +< /* subnormal value */ +--- +> // subnormal value +247,261c364,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing +284c401 +< /* g1 and g0 are as in section 9.8.3 of [1], so g = g1 2^63 + g0 */ +--- +> // g1 and g0 are as in section 9.9.3 of [1], so g = g1 2^63 + g0 +295,303c412,420 +< * For n = 17, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +< * = floor(s 115_292_150_460_684_698 2^4 / 2^64) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 17, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 115_292_150_460_684_698 / 2^60) +> = floor(s 115_292_150_460_684_698 2^4 / 2^64) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +315,318c432,435 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +324c441 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +328,329c445,446 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +336,337c453,454 +< * Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +< * See section 9.9 and figure 8 of [1]. +--- +> Computes rop(cp g 2^(-127)), where g = g1 2^63 + g0 +> See section 9.10 and figure 5 of [1]. +349c466 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +353,356c470,473 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +364,367c481,484 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +373,384c490,501 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. 
+< * Therefore, split the H = 17 digits of f into: +< * h = the most significant digit of f +< * m = the next 8 most significant digits of f +< * l = the last 8, least significant digits of f +< * +< * For n = 17, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +< * floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +< * and for n = 9, m = 8 +< * floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 17 digits of f into: +> h = the most significant digit of f +> m = the next 8 most significant digits of f +> l = the last 8, least significant digits of f +> +> For n = 17, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(193_428_131_138_340_668 f / 2^84) = +> floor(floor(193_428_131_138_340_668 f / 2^64) / 2^20) +> and for n = 9, m = 8 +> floor(hm / 10^8) = floor(1_441_151_881 hm / 2^57) +402,404c519,521 +< * 0 < e <= 7: plain format without leading zeroes. +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +426c543 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +439c556 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +457,458c574,575 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +472c589 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +480,485c597,602 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +505,506c622,623 +< * For n = 3, m = 2 the table in section 10 of [1] shows +< * floor(e / 100) = floor(1_311 e / 2^17) +--- +> For n = 3, m = 2 the table in section 10 of [1] shows +> floor(e / 100) = floor(1_311 e / 2^17) +513,514c630,631 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +529c646 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. diff --git a/src/python/merge_conflict_analysis_diffs/845/spork/diff_FloatToDecimal.java.txt b/src/python/merge_conflict_analysis_diffs/845/spork/diff_FloatToDecimal.java.txt new file mode 100644 index 0000000000..846513122f --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/845/spork/diff_FloatToDecimal.java.txt @@ -0,0 +1,577 @@ +2,3c2 +< * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved. +< * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
+--- +> * Copyright 2018-2020 Raffaello Giulietti +5,9c4,9 +< * This code is free software; you can redistribute it and/or modify it +< * under the terms of the GNU General Public License version 2 only, as +< * published by the Free Software Foundation. Oracle designates this +< * particular file as subject to the "Classpath" exception as provided +< * by Oracle in the LICENSE file that accompanied this code. +--- +> * Permission is hereby granted, free of charge, to any person obtaining a copy +> * of this software and associated documentation files (the "Software"), to deal +> * in the Software without restriction, including without limitation the rights +> * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> * copies of the Software, and to permit persons to whom the Software is +> * furnished to do so, subject to the following conditions: +11,15c11,12 +< * This code is distributed in the hope that it will be useful, but WITHOUT +< * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +< * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +< * version 2 for more details (a copy is included in the LICENSE file that +< * accompanied this code). +--- +> * The above copyright notice and this permission notice shall be included in +> * all copies or substantial portions of the Software. +17,23c14,20 +< * You should have received a copy of the GNU General Public License version +< * 2 along with this work; if not, write to the Free Software Foundation, +< * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +< * +< * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +< * or visit www.oracle.com if you need additional information or have any +< * questions. +--- +> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> * THE SOFTWARE. +30,32d26 +< import static java.lang.Float.floatToRawIntBits; +< import static java.lang.Integer.numberOfLeadingZeros; +< +39a34,36 +> import static java.lang.Float.floatToRawIntBits; +> import static java.lang.Integer.numberOfLeadingZeros; +> +41a39,40 +> * +> * @author Raffaello Giulietti +45,56c44,55 +< * For full details about this code see the following references: +< * +< * [1] Giulietti, "The Schubfach way to render doubles", +< * https://drive.google.com/file/d/1gp5xv4CAa78SVgCeWfGqqI4FfYYYuNFb +< * +< * [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +< * +< * [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +< * +< * Divisions are avoided altogether for the benefit of those architectures +< * that do not provide specific machine instructions or where they are slow. +< * This is discussed in section 10 of [1]. 
+--- +> For full details about this code see the following references: +> +> [1] Giulietti, "The Schubfach way to render doubles", +> https://drive.google.com/open?id=1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN +> +> [2] IEEE Computer Society, "IEEE Standard for Floating-Point Arithmetic" +> +> [3] Bouvier & Zimmermann, "Division-Free Binary-to-Decimal Conversion" +> +> Divisions are avoided altogether for the benefit of those architectures +> that do not provide specific machine instructions or where they are slow. +> This is discussed in section 10 of [1]. +59c58,60 +< /* The precision in bits */ +--- +> // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/FloatToDecimal.java +> +> // The precision in bits. +62c63 +< /* Exponent width in bits */ +--- +> // Exponent width in bits. +65,66c66,67 +< /* Minimum value of the exponent: -(2^(W-1)) - P + 3 */ +< static final int Q_MIN = (-1 << (W - 1)) - P + 3; +--- +> // Minimum value of the exponent: -(2^(W-1)) - P + 3. +> static final int Q_MIN = (-1 << W - 1) - P + 3; +68,69c69,70 +< /* Maximum value of the exponent: 2^(W-1) - P */ +< static final int Q_MAX = (1 << (W - 1)) - P; +--- +> // Maximum value of the exponent: 2^(W-1) - P. +> static final int Q_MAX = (1 << W - 1) - P; +71c72 +< /* 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN */ +--- +> // 10^(E_MIN - 1) <= MIN_VALUE < 10^E_MIN +74c75 +< /* 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX */ +--- +> // 10^(E_MAX - 1) <= MAX_VALUE < 10^E_MAX +77c78 +< /* Threshold to detect tiny values, as in section 8.2.1 of [1] */ +--- +> // Threshold to detect tiny values, as in section 8.1.1 of [1] +80c81 +< /* The minimum and maximum k, as in section 8 of [1] */ +--- +> // The minimum and maximum k, as in section 8 of [1] +84c85 +< /* H is as in section 8.1 of [1] */ +--- +> // H is as in section 8 of [1]. +87,88c88,89 +< /* Minimum value of the significand of a normal value: 2^(P-1) */ +< private static final int C_MIN = 1 << (P - 1); +--- +> // Minimum value of the significand of a normal value: 2^(P-1). +> private static final int C_MIN = 1 << P - 1; +90c91 +< /* Mask to extract the biased exponent */ +--- +> // Mask to extract the biased exponent. +93,94c94,95 +< /* Mask to extract the fraction bits */ +< private static final int T_MASK = (1 << (P - 1)) - 1; +--- +> // Mask to extract the fraction bits. +> private static final int T_MASK = (1 << P - 1) - 1; +96c97 +< /* Used in rop() */ +--- +> // Used in rop(). +99c100 +< /* Used for left-to-tight digit extraction */ +--- +> // Used for left-to-tight digit extraction. +102,107c103,112 +< private static final int NON_SPECIAL = 0; +< private static final int PLUS_ZERO = 1; +< private static final int MINUS_ZERO = 2; +< private static final int PLUS_INF = 3; +< private static final int MINUS_INF = 4; +< private static final int NAN = 5; +--- +> private static final int NON_SPECIAL = 0; +> private static final int PLUS_ZERO = 1; +> private static final int MINUS_ZERO = 2; +> private static final int PLUS_INF = 3; +> private static final int MINUS_INF = 4; +> private static final int NAN = 5; +> +> // For thread-safety, each thread gets its own instance of this class. 
+> private static final ThreadLocal threadLocal = +> ThreadLocal.withInitial(FloatToDecimal::new); +110,114c115,119 +< * Room for the longer of the forms +< * -ddddd.dddd H + 2 characters +< * -0.00ddddddddd H + 5 characters +< * -d.ddddddddE-ee H + 6 characters +< * where there are H digits d +--- +> Room for the longer of the forms +> -ddddd.dddd H + 2 characters +> -0.00ddddddddd H + 5 characters +> -d.ddddddddE-ee H + 6 characters +> where there are H digits d +116c121 +< public static final int MAX_CHARS = H + 6; +--- +> public final int MAX_CHARS = H + 6; +117a123 +> // Numerical results are created here... +120c126,129 +< /* Index into bytes of rightmost valid character */ +--- +> // ... and copied here in appendTo() +> private final char[] chars = new char[MAX_CHARS]; +> +> // Index into buf of rightmost valid character. +127,128c136,168 +< * Returns a string representation of the {@code float} +< * argument. All characters mentioned below are ASCII characters. +--- +> * Returns a string rendering of the {@code float} argument. +> * +> *

    The characters of the result are all drawn from the ASCII set. +> *

      +> *
    • Any NaN, whether quiet or signaling, is rendered as +> * {@code "NaN"}, regardless of the sign bit. +> *
    • The infinities +∞ and -∞ are rendered as +> * {@code "Infinity"} and {@code "-Infinity"}, respectively. +> *
    • The positive and negative zeroes are rendered as +> * {@code "0.0"} and {@code "-0.0"}, respectively. +> *
    • A finite negative {@code v} is rendered as the sign +> * '{@code -}' followed by the rendering of the magnitude -{@code v}. +> *
    • A finite positive {@code v} is rendered in two stages: +> *
        +> *
      • Selection of a decimal: A well-defined +> * decimal dv is selected +> * to represent {@code v}. +> *
      • Formatting as a string: The decimal +> * dv is formatted as a string, +> * either in plain or in computerized scientific notation, +> * depending on its value. +> *
      +> *
    +> * +> *

    A decimal is a number of the form +> * d×10i +> * for some (unique) integers d > 0 and i such that +> * d is not a multiple of 10. +> * These integers are the significand and +> * the exponent, respectively, of the decimal. +> * The length of the decimal is the (unique) +> * integer n meeting +> * 10n-1d < 10n. +130,132c170,246 +< * @param v the {@code float} to be converted. +< * @return a string representation of the argument. +< * @see Float#toString(float) +--- +> *

    The decimal dv +> * for a finite positive {@code v} is defined as follows: +> *

      +> *
    • Let R be the set of all decimals that round to {@code v} +> * according to the usual round-to-closest rule of +> * IEEE 754 floating-point arithmetic. +> *
    • Let m be the minimal length over all decimals in R. +> *
    • When m ≥ 2, let T be the set of all decimals +> * in R with length m. +> * Otherwise, let T be the set of all decimals +> * in R with length 1 or 2. +> *
    • Define dv as +> * the decimal in T that is closest to {@code v}. +> * Or if there are two such decimals in T, +> * select the one with the even significand (there is exactly one). +> *
    +> * +> *

    The (uniquely) selected decimal dv +> * is then formatted. +> * +> *

    Let d, i and n be the significand, exponent and +> * length of dv, respectively. +> * Further, let e = n + i - 1 and let +> * d1dn +> * be the usual decimal expansion of the significand. +> * Note that d1 ≠ 0 ≠ dn. +> *

      +> *
    • Case -3 ≤ e < 0: +> * dv is formatted as +> * 0.00d1dn, +> * where there are exactly -(n + i) zeroes between +> * the decimal point and d1. +> * For example, 123 × 10-4 is formatted as +> * {@code 0.0123}. +> *
    • Case 0 ≤ e < 7: +> *
        +> *
      • Subcase i ≥ 0: +> * dv is formatted as +> * d1dn00.0, +> * where there are exactly i zeroes +> * between dn and the decimal point. +> * For example, 123 × 102 is formatted as +> * {@code 12300.0}. +> *
      • Subcase i < 0: +> * dv is formatted as +> * d1dn+i.dn+i+1dn. +> * There are exactly -i digits to the right of +> * the decimal point. +> * For example, 123 × 10-1 is formatted as +> * {@code 12.3}. +> *
      +> *
    • Case e < -3 or e ≥ 7: +> * computerized scientific notation is used to format +> * dv. +> * Here e is formatted as by {@link Integer#toString(int)}. +> *
        +> *
      • Subcase n = 1: +> * dv is formatted as +> * d1.0Ee. +> * For example, 1 × 1023 is formatted as +> * {@code 1.0E23}. +> *
      • Subcase n > 1: +> * dv is formatted as +> * d1.d2dnEe. +> * For example, 123 × 10-21 is formatted as +> * {@code 1.23E-19}. +> *
      +> *
    +> * +> * @param v the {@code float} to be rendered. +> * @return a string rendering of the argument. +135c249 +< return new FloatToDecimal().toDecimalString(v); +--- +> return threadLocalInstance().toDecimalString(v); +151c265,269 +< return new FloatToDecimal().appendDecimalTo(v, app); +--- +> return threadLocalInstance().appendDecimalTo(v, app); +> } +> +> private static FloatToDecimal threadLocalInstance() { +> return threadLocal.get(); +169,170c287 +< char[] chars = new char[index + 1]; +< for (int i = 0; i < chars.length; ++i) { +--- +> for (int i = 0; i <= index; ++i) { +174c291 +< return ((StringBuilder) app).append(chars); +--- +> return ((StringBuilder) app).append(chars, 0, index + 1); +177c294 +< return ((StringBuffer) app).append(chars); +--- +> return ((StringBuffer) app).append(chars, 0, index + 1); +179,180c296,297 +< for (char c : chars) { +< app.append(c); +--- +> for (int i = 0; i <= index; ++i) { +> app.append(chars[i]); +192,197c309,314 +< * Returns +< * PLUS_ZERO iff v is 0.0 +< * MINUS_ZERO iff v is -0.0 +< * PLUS_INF iff v is POSITIVE_INFINITY +< * MINUS_INF iff v is NEGATIVE_INFINITY +< * NAN iff v is NaN +--- +> Returns +> PLUS_ZERO iff v is 0.0 +> MINUS_ZERO iff v is -0.0 +> PLUS_INF iff v is POSITIVE_INFINITY +> MINUS_INF iff v is NEGATIVE_INFINITY +> NAN iff v is NaN +201,207c318,324 +< * For full details see references [2] and [1]. +< * +< * For finite v != 0, determine integers c and q such that +< * |v| = c 2^q and +< * Q_MIN <= q <= Q_MAX and +< * either 2^(P-1) <= c < 2^P (normal) +< * or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +--- +> For full details see references [2] and [1]. +> +> For finite v != 0, determine integers c and q such that +> |v| = c 2^q and +> Q_MIN <= q <= Q_MAX and +> either 2^(P-1) <= c < 2^P (normal) +> or 0 < c < 2^(P-1) and q = Q_MIN (subnormal) +218c335 +< /* normal value. Here mq = -q */ +--- +> // normal value. Here mq = -q +221c338 +< /* The fast path discussed in section 8.3 of [1] */ +--- +> // The fast path discussed in section 8.2 of [1]. +231c348 +< /* subnormal value */ +--- +> // subnormal value +246,261c363,378 +< * The skeleton corresponds to figure 7 of [1]. +< * The efficient computations are those summarized in figure 9. +< * Also check the appendix. +< * +< * Here's a correspondence between Java names and names in [1], +< * expressed as approximate LaTeX source code and informally. +< * Other names are identical. +< * cb: \bar{c} "c-bar" +< * cbr: \bar{c}_r "c-bar-r" +< * cbl: \bar{c}_l "c-bar-l" +< * +< * vb: \bar{v} "v-bar" +< * vbr: \bar{v}_r "v-bar-r" +< * vbl: \bar{v}_l "v-bar-l" +< * +< * rop: r_o' "r-o-prime" +--- +> The skeleton corresponds to figure 4 of [1]. +> The efficient computations are those summarized in figure 7. +> Also check the appendix. +> +> Here's a correspondence between Java names and names in [1], +> expressed as approximate LaTeX source code and informally. +> Other names are identical. 
+> cb: \bar{c} "c-bar" +> cbr: \bar{c}_r "c-bar-r" +> cbl: \bar{c}_l "c-bar-l" +> +> vb: \bar{v} "v-bar" +> vbr: \bar{v}_r "v-bar-r" +> vbl: \bar{v}_l "v-bar-l" +> +> rop: r_o' "r-o-prime" +269,271c386,388 +< * flog10pow2(e) = floor(log_10(2^e)) +< * flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +< * flog2pow10(e) = floor(log_2(10^e)) +--- +> flog10pow2(e) = floor(log_10(2^e)) +> flog10threeQuartersPow2(e) = floor(log_10(3/4 2^e)) +> flog2pow10(e) = floor(log_2(10^e)) +274c391 +< /* regular spacing */ +--- +> // regular spacing +278c395 +< /* irregular spacing */ +--- +> // irregular spacing0 +284c401 +< /* g is as in the appendix */ +--- +> // g is as in the appendix +294,301c411,418 +< * For n = 9, m = 1 the table in section 10 of [1] shows +< * s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +< * +< * sp10 = 10 s' +< * tp10 = 10 t' +< * upin iff u' = sp10 10^k in Rv +< * wpin iff w' = tp10 10^k in Rv +< * See section 9.3 of [1]. +--- +> For n = 9, m = 1 the table in section 10 of [1] shows +> s' = floor(s / 10) = floor(s 1_717_986_919 / 2^34) +> +> sp10 = 10 s' +> tp10 = 10 t' +> upin iff u' = sp10 10^k in Rv +> wpin iff w' = tp10 10^k in Rv +> See section 9.4 of [1]. +313,316c430,433 +< * 10 <= s < 100 or s >= 100 and u', w' not in Rv +< * uin iff u = s 10^k in Rv +< * win iff w = t 10^k in Rv +< * See section 9.3 of [1]. +--- +> 10 <= s < 100 or s >= 100 and u', w' not in Rv +> uin iff u = s 10^k in Rv +> win iff w = t 10^k in Rv +> See section 9.4 of [1]. +322c439 +< /* Exactly one of u or w lies in Rv */ +--- +> // Exactly one of u or w lies in Rv. +326,327c443,444 +< * Both u and w lie in Rv: determine the one closest to v. +< * See section 9.3 of [1]. +--- +> Both u and w lie in Rv: determine the one closest to v. +> See section 9.4 of [1]. +334,335c451,452 +< * Computes rop(cp g 2^(-95)) +< * See appendix and figure 11 of [1]. +--- +> Computes rop(cp g 2^(-95)) +> See appendix and figure 8 of [1]. +344c461 +< * Formats the decimal f 10^e. +--- +> Formats the decimal f 10^e. +348,351c465,468 +< * For details not discussed here see section 10 of [1]. +< * +< * Determine len such that +< * 10^(len-1) <= f < 10^len +--- +> For details not discussed here see section 10 of [1]. +> +> Determine len such that +> 10^(len-1) <= f < 10^len +359,362c476,479 +< * Let fp and ep be the original f and e, respectively. +< * Transform f and e to ensure +< * 10^(H-1) <= f < 10^H +< * fp 10^ep = f 10^(e-H) = 0.f 10^e +--- +> Let fp and ep be the original f and e, respectively. +> Transform f and e to ensure +> 10^(H-1) <= f < 10^H +> fp 10^ep = f 10^(e-H) = 0.f 10^e +364c481 +< f *= (int)pow10(H - len); +--- +> f *= pow10(H - len); +368,375c485,492 +< * The toChars?() methods perform left-to-right digits extraction +< * using ints, provided that the arguments are limited to 8 digits. +< * Therefore, split the H = 9 digits of f into: +< * h = the most significant digit of f +< * l = the last 8, least significant digits of f +< * +< * For n = 9, m = 8 the table in section 10 of [1] shows +< * floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +--- +> The toChars?() methods perform left-to-right digits extraction +> using ints, provided that the arguments are limited to 8 digits. +> Therefore, split the H = 9 digits of f into: +> h = the most significant digit of f +> l = the last 8, least significant digits of f +> +> For n = 9, m = 8 the table in section 10 of [1] shows +> floor(f / 10^8) = floor(1_441_151_881 f / 2^57) +391,393c508,510 +< * 0 < e <= 7: plain format without leading zeroes. 
+< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> 0 < e <= 7: plain format without leading zeroes. +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +415c532 +< /* -3 < e <= 0: plain format with leading zeroes */ +--- +> // -3 < e <= 0: plain format with leading zeroes. +428c545 +< /* -3 >= e | e > 7: computerized scientific notation */ +--- +> // -3 >= e | e > 7: computerized scientific notation +439,440c556,557 +< * Left-to-right digits extraction: +< * algorithm 1 in [3], with b = 10, k = 8, n = 28. +--- +> Left-to-right digits extraction: +> algorithm 1 in [3], with b = 10, k = 8, n = 28. +454c571 +< /* ... but do not remove the one directly to the right of '.' */ +--- +> // ... but do not remove the one directly to the right of '.' +462,467c579,584 +< * Algorithm 1 in [3] needs computation of +< * floor((a + 1) 2^n / b^k) - 1 +< * with a < 10^8, b = 10, k = 8, n = 28. +< * Noting that +< * (a + 1) 2^n <= 10^8 2^28 < 10^17 +< * For n = 17, m = 8 the table in section 10 of [1] leads to: +--- +> Algorithm 1 in [3] needs computation of +> floor((a + 1) 2^n / b^k) - 1 +> with a < 10^8, b = 10, k = 8, n = 28. +> Noting that +> (a + 1) 2^n <= 10^8 2^28 < 10^17 +> For n = 17, m = 8 the table in section 10 of [1] leads to: +485,486c602,603 +< * For n = 2, m = 1 the table in section 10 of [1] shows +< * floor(e / 10) = floor(103 e / 2^10) +--- +> For n = 2, m = 1 the table in section 10 of [1] shows +> floor(e / 10) = floor(103 e / 2^10) +501c618 +< /* Using the deprecated constructor enhances performance */ +--- +> // Using the deprecated constructor enhances performance. +507a625 +> diff --git a/src/python/merge_conflict_analysis_diffs/849/git_hires_merge/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/git_hires_merge/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..4d9f242eee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/git_hires_merge/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,6 @@ +267a268 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +302a304 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +337a340 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..6f6b00a158 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,33 @@ +268c268 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +270,275d269 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +310c304 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +312,317d305 +< ||||||| 
287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +352c340 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +354,359d341 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_adjacent/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_adjacent/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..4d9f242eee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_adjacent/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,6 @@ +267a268 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +302a304 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +337a340 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_ignorespace/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_ignorespace/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..6f6b00a158 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_ignorespace/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,33 @@ +268c268 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +270,275d269 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +310c304 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +312,317d305 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +352c340 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +354,359d341 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java 
diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_imports/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_imports/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..4d9f242eee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_imports/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,6 @@ +267a268 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +302a304 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +337a340 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_imports_ignorespace/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_imports_ignorespace/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..4d9f242eee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_ort_imports_ignorespace/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,6 @@ +267a268 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +302a304 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +337a340 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_histogram/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_histogram/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..6f6b00a158 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_histogram/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,33 @@ +268c268 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +270,275d269 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +310c304 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +312,317d305 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +352c340 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +354,359d341 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java diff --git 
a/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_ignorespace/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_ignorespace/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..6f6b00a158 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_ignorespace/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,33 @@ +268c268 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +270,275d269 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +310c304 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +312,317d305 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +352c340 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +354,359d341 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_minimal/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_minimal/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..6f6b00a158 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_minimal/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,33 @@ +268c268 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +270,275d269 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +310c304 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +312,317d305 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +352c340 +< <<<<<<< 
HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +354,359d341 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_myers/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_myers/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..6f6b00a158 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_myers/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,33 @@ +268c268 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +270,275d269 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +310c304 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +312,317d305 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +352c340 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +354,359d341 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java diff --git a/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_patience/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_patience/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..6f6b00a158 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/gitmerge_recursive_patience/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,33 @@ +268c268 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +270,275d269 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +310c304 +< <<<<<<< 
HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +312,317d305 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +352c340 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +354,359d341 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java diff --git a/src/python/merge_conflict_analysis_diffs/849/intellimerge/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/intellimerge/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..6f6b00a158 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/intellimerge/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,33 @@ +268c268 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +270,275d269 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +310c304 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +312,317d305 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +352c340 +< <<<<<<< HEAD:src/main/java/tools/jackson/core/filter/FilteringGeneratorDelegate.java +--- +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); +354,359d341 +< ||||||| 287ec3223:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java +< return; +< ======= +< _filterContext = _filterContext.createChildObjectContext(null, false); +< return; +< >>>>>>> TEMP_RIGHT_BRANCH:src/main/java/com/fasterxml/jackson/core/filter/FilteringGeneratorDelegate.java diff --git a/src/python/merge_conflict_analysis_diffs/849/spork/diff_FilteringGeneratorDelegate.java.txt b/src/python/merge_conflict_analysis_diffs/849/spork/diff_FilteringGeneratorDelegate.java.txt new file mode 100644 index 0000000000..4d9f242eee --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/849/spork/diff_FilteringGeneratorDelegate.java.txt @@ -0,0 +1,6 @@ +267a268 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, null, false); +302a304 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, 
currValue, false); +337a340 +> _filterContext = _filterContext.createChildObjectContext(_itemFilter, currValue, false); diff --git a/src/python/merge_conflict_analysis_diffs/921/git_hires_merge/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/git_hires_merge/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..67785841ec --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/git_hires_merge/diff_VERSION-2.x.txt @@ -0,0 +1,259 @@ +====1 +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7c +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:11a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:13,179c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: 
`JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with `TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization 
breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types (annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by 
XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:181c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..f1911744f6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort/diff_VERSION-2.x.txt @@ -0,0 +1,278 @@ +==== +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7,25c + <<<<<<< HEAD + 2.10.5.1 (02-Dec-2020) + ||||||| 3d2903e8a + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + ======= + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + #2996: Block 2 more gadget types (placeholder) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:29a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:31,197c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. 
on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with 
`TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types 
(annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:199c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_adjacent/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_adjacent/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..67785841ec --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_adjacent/diff_VERSION-2.x.txt @@ -0,0 +1,259 @@ +====1 +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7c +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:11a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:13,179c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) 
+ #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with `TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in 
`TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types (annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, 
spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:181c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..f1911744f6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,278 @@ +==== +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7,25c + <<<<<<< HEAD + 2.10.5.1 (02-Dec-2020) + ||||||| 3d2903e8a + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + ======= + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + #2996: Block 2 more gadget types (placeholder) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:29a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:31,197c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. 
on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with 
`TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types 
(annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:199c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_imports/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_imports/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..67785841ec --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_imports/diff_VERSION-2.x.txt @@ -0,0 +1,259 @@ +====1 +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7c +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:11a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:13,179c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) 
+ #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with `TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in 
`TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types (annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, 
spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:181c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..67785841ec --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,259 @@ +====1 +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7c +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:11a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:13,179c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. 
on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with 
`TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types 
(annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:181c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_histogram/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_histogram/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..f1911744f6 --- 
/dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_histogram/diff_VERSION-2.x.txt @@ -0,0 +1,278 @@ +==== +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7,25c + <<<<<<< HEAD + 2.10.5.1 (02-Dec-2020) + ||||||| 3d2903e8a + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + ======= + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + #2996: Block 2 more gadget types (placeholder) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:29a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:31,197c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured 
`ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with `TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: 
Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types (annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: 
Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:199c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..f1911744f6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,278 @@ +==== +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7,25c + <<<<<<< HEAD + 2.10.5.1 (02-Dec-2020) + ||||||| 3d2903e8a + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + ======= + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + #2996: Block 2 more gadget types (placeholder) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:29a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:31,197c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. 
on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with 
`TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types 
(annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:199c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_minimal/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_minimal/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..f1911744f6 --- 
/dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_minimal/diff_VERSION-2.x.txt @@ -0,0 +1,278 @@ +==== +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7,25c + <<<<<<< HEAD + 2.10.5.1 (02-Dec-2020) + ||||||| 3d2903e8a + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + ======= + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + #2996: Block 2 more gadget types (placeholder) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:29a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:31,197c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured 
`ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with `TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: 
Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types (annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: 
Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:199c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_myers/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_myers/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..f1911744f6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_myers/diff_VERSION-2.x.txt @@ -0,0 +1,278 @@ +==== +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7,25c + <<<<<<< HEAD + 2.10.5.1 (02-Dec-2020) + ||||||| 3d2903e8a + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + ======= + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + #2996: Block 2 more gadget types (placeholder) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:29a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:31,197c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. 
on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with 
`TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types 
(annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:199c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_patience/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_patience/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..f1911744f6 --- 
/dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/gitmerge_recursive_patience/diff_VERSION-2.x.txt @@ -0,0 +1,278 @@ +==== +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7,25c + <<<<<<< HEAD + 2.10.5.1 (02-Dec-2020) + ||||||| 3d2903e8a + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + ======= + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + #2996: Block 2 more gadget types (placeholder) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:29a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:31,197c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured 
`ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with `TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: 
Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types (annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: 
Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:199c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/intellimerge/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/intellimerge/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..f1911744f6 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/921/intellimerge/diff_VERSION-2.x.txt @@ -0,0 +1,278 @@ +==== +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7,25c + <<<<<<< HEAD + 2.10.5.1 (02-Dec-2020) + ||||||| 3d2903e8a + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + ======= + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + #2996: Block 2 more gadget types (placeholder) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) + >>>>>>> TEMP_RIGHT_BRANCH +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:29a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:31,197c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. 
on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: `DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with 
`TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in `TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types 
(annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:199c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/921/spork/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/921/spork/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..67785841ec --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/921/spork/diff_VERSION-2.x.txt @@ -0,0 +1,259 @@ +====1 +1:7,12c + 2.9.10.8 (not yet released) + + #2986: Block two more gadget types (commons-dbcp2, CVE-2020-35490/CVE-2020-35491) + (reported by Al1ex@knownsec) + + 2.9.10.7 (02-Dec-2020) +2:7c +3:7c + 2.10.5.1 (02-Dec-2020) +====1 +1:17,18c + #2854: Block one more gadget type (javax.swing, CVE-2020-xxx) + (reported by Yangkun(ICSL)) +2:11a +3:11a +====1 +1:20c + 2.9.10.6 (24-Aug-2020) +2:13,179c +3:13,179c + 2.10.5 (21-Jul-2020) + + #2787 (partial fix): NPE after add mixin for enum + (reported by Denis K) + + 2.10.4 (03-May-2020) + + #2679: `ObjectMapper.readValue("123", Void.TYPE)` throws "should never occur" + (reported by Endre S) + + 2.10.3 (03-Mar-2020) + + #2482: `JSONMappingException` `Location` column number is one line Behind the actual + location + (reported by Kamal A, fixed by Ivo S) + #2599: NoClassDefFoundError at DeserializationContext. on Android 4.1.2 + and Jackson 2.10.0 + (reported by Tobias P) + #2602: ByteBufferSerializer produces unexpected results with a duplicated ByteBuffer + and a position > 0 + (reported by Eduard T) + #2605: Failure to deserializer polymorphic subtypes of base type `Enum` + (reported by uewle@github) + #2610: `EXTERNAL_PROPERTY` doesn't work with `@JsonIgnoreProperties` + (reported, fix suggested by Alexander S) + + 2.10.2 (05-Jan-2020) + + #2101: `FAIL_ON_NULL_FOR_PRIMITIVES` failure does not indicate field name in exception message + (reported by raderio@github) + + 2.10.1 (09-Nov-2019) + + #2457: Extended enum values are not handled as enums when used as Map keys + (reported by Andrey K) + #2473: Array index missing in path of `JsonMappingException` for `Collection`, + with custom deserializer + (reported by João G) + #2475: `StringCollectionSerializer` calls `JsonGenerator.setCurrentValue(value)`, + which messes up current value for sibling properties + (reported by Ryan B) + #2485: Add `uses` for `Module` in module-info + (contributed by Marc M) + #2513: BigDecimalAsStringSerializer in NumberSerializer throws IllegalStateException in 2.10 + (reported by Johan H) + #2519: Serializing `BigDecimal` values inside containers ignores shape override + (reported by Richard W) + #2520: Sub-optimal exception message when failing to deserialize non-static inner classes + (reported by Mark S) + #2529: Add tests to ensure `EnumSet` and `EnumMap` work correctly with "null-as-empty" + #2534: Add `BasicPolymorphicTypeValidator.Builder.allowIfSubTypeIsArray()` + #2535: Allow String-to-byte[] coercion for String-value collections + + 2.10.0 (26-Sep-2019) + + #18: Make `JsonNode` serializable + #1093: Default typing does not work with `writerFor(Object.class)` + (reported by hoomanv@github) + #1675: Remove "impossible" `IOException` in `readTree()` and `readValue()` `ObjectMapper` + methods which accept Strings + (requested by matthew-pwnieexpress@github) + #1954: Add Builder pattern for creating configured `ObjectMapper` instances + #1995: Limit size of `DeserializerCache`, auto-flush on exceeding + #2059: Remove `final` modifier for `TypeFactory` + (requested by Thibaut R) + #2077: `JsonTypeInfo` with a subtype having `JsonFormat.Shape.ARRAY` and + no fields generates `{}` not `[]` + (reported by Sadayuki F) + #2115: Support naive deserialization of `Serializable` values as "untyped", same + as `java.lang.Object` + (requested by Christopher S) + #2116: Make NumberSerializers.Base public and its inherited classes not final + (requested by Édouard M) + #2126: 
`DeserializationContext.instantiationException()` throws `InvalidDefinitionException` + #2129: Add `SerializationFeature.WRITE_ENUM_KEYS_USING_INDEX`, separate from value setting + (suggested by renzihui@github) + #2133: Improve `DeserializationProblemHandler.handleUnexpectedToken()` to allow handling of + Collection problems + (contributed by Semyon L) + #2149: Add `MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES` + (suggested by Craig P) + #2153: Add `JsonMapper` to replace generic `ObjectMapper` usage + #2164: `FactoryBasedEnumDeserializer` does not respect + `DeserializationFeature.WRAP_EXCEPTIONS` + (reported by Yiqiu H) + #2187: Make `JsonNode.toString()` use shared `ObjectMapper` to produce valid json + #2189: `TreeTraversingParser` does not check int bounds + (reported by Alexander S) + #2195: Add abstraction `PolymorphicTypeValidator`, for limiting subtypes allowed by + default typing, `@JsonTypeInfo` + #2196: Type safety for `readValue()` with `TypeReference` + (suggested by nguyenfilip@github) + #2204: Add `JsonNode.isEmpty()` as convenience alias + #2211: Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content + #2217: Suboptimal memory allocation in `TextNode.getBinaryValue()` + (reported by Christoph B) + #2220: Force serialization always for `convertValue()`; avoid short-cuts + #2223: Add `missingNode()` method in `JsonNodeFactory` + #2227: Minor cleanup of exception message for `Enum` binding failure + (reported by RightHandedMonkey@github) + #2230: `WRITE_BIGDECIMAL_AS_PLAIN` is ignored if `@JsonFormat` is used + (reported by Pavel C) + #2236: Type id not provided on `Double.NaN`, `Infinity` with `@JsonTypeInfo` + (reported by C-B-B@github) + #2237: Add "required" methods in `JsonNode`: `required(String | int)`, + `requiredAt(JsonPointer)` + #2241: Add `PropertyNamingStrategy.LOWER_DOT_CASE` for dot-delimited names + (contributed by zenglian@github.com) + #2251: Getter that returns an abstract collection breaks a delegating `@JsonCreator` + #2265: Inconsistent handling of Collections$UnmodifiableList vs Collections$UnmodifiableRandomAccessList + #2273: Add basic Java 9+ module info + #2280: JsonMerge not work with constructor args + (reported by Deblock T) + #2309: READ_ENUMS_USING_TO_STRING doesn't support null values + (reported, fix suggested by Ben A) + #2311: Unnecessary MultiView creation for property writers + (suggested by Manuel H) + #2331: `JsonMappingException` through nested getter with generic wildcard return type + (reported by sunchezz89@github) + #2336: `MapDeserializer` can not merge `Map`s with polymorphic values + (reported by Robert G) + #2338: Suboptimal return type for `JsonNode.withArray()` + (reported by Victor N) + #2339: Suboptimal return type for `ObjectNode.set()` + (reported by Victor N) + #2348: Add sanity checks for `ObjectMapper.readXXX()` methods + (requested by ebundy@github) + #2349: Add option `DefaultTyping.EVERYTHING` to support Kotlin data classes + #2357: Lack of path on MismatchedInputException + (suggested by TheEin@github) + #2378: `@JsonAlias` doesn't work with AutoValue + (reported by David H) + #2390: `Iterable` serialization breaks when adding `@JsonFilter` annotation + (reported by Chris M) + #2392: `BeanDeserializerModifier.modifyDeserializer()` not applied to custom bean deserializers + (reported by andreasbaus@github) + #2393: `TreeTraversingParser.getLongValue()` incorrectly checks `canConvertToInt()` + (reported by RabbidDog@github) + #2398: Replace recursion in 
`TokenBuffer.copyCurrentStructure()` with iteration + (reported by Sam S) + #2415: Builder-based POJO deserializer should pass builder instance, not type, + to `handleUnknownVanilla()` + (proposed by Vladimir T, follow up to #822) + #2416: Optimize `ValueInstantiator` construction for default `Collection`, `Map` types + #2422: `scala.collection.immutable.ListMap` fails to serialize since 2.9.3 + (reported by dejanlokar1@github) + #2424: Add global config override setting for `@JsonFormat.lenient()` + #2428: Use "activateDefaultTyping" over "enableDefaultTyping" in 2.10 with new methods + #2430: Change `ObjectMapper.valueToTree()` to convert `null` to `NullNode` + #2432: Add support for module bundles + (contributed by Marcos P) + #2433: Improve `NullNode.equals()` + (suggested by David B) + #2442: `ArrayNode.addAll()` adds raw `null` values which cause NPE on `deepCopy()` + and `toString()` + (reported, fix contributed by Hesham M) + #2446: Java 11: Unable to load JDK7 types (annotations, java.nio.file.Path): no Java7 support added + (reported by David C) + #2451: Add new `JsonValueFormat` value, `UUID` + #2453: Add `DeserializationContext.readTree(JsonParser)` convenience method + #2458: `Nulls` property metadata ignored for creators + (reported by XakepSDK@github) + #2466: Didn't find class "java.nio.file.Path" below Android api 26 + (reported by KevynBct@github) + #2467: Accept `JsonTypeInfo.As.WRAPPER_ARRAY` with no second argument to + deserialize as "null value" + (contributed by Martin C) +====1 +1:22,86c + #2798: Block one more gadget type (com.pastdev.httpcomponents, CVE-2020-24750) + (reported by Al1ex@knownsec) + #2814: Block one more gadget type (Anteros-DBCP, CVE-2020-24616) + (reported by ChenZhaojun) + #2826: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + #2827: Block one more gadget type (xxx, CVE-xxxx-xxx) + (reported by ChenZhaojun) + + 2.9.10.5 (21-Jun-2020) + + #2688: Block one more gadget type (apache-drill, CVE-2020-14060) + (reported by Topsec(tcc)) + #2698: Block one more gadget type (weblogic/oracle-aqjms, CVE-2020-14061) + (reported by Fangrun Li) + #2704: Block one more gadget type (jaxp-ri, CVE-2020-14062) + (reported by XuYuanzhen) + #2765: Block one more gadget type (org.jsecurity, CVE-2020-14195) + (reported by Al1ex@knownsec) + + 2.9.10.4 (11-Apr-2020) + + #2631: Block one more gadget type (shaded-hikari-config, CVE-2020-9546) + (reported by threedr3am & LFY) + #2634: Block two more gadget types (ibatis-sqlmap, anteros-core; CVE-2020-9547 / CVE-2020-9548) + (reported by threedr3am & V1ZkRA) + #2642: Block one more gadget type (javax.swing, CVE-2020-10969) + (reported by threedr3am) + #2648: Block one more gadget type (shiro-core) + #2653: Block one more gadget type (shiro-core) + #2658: Block one more gadget type (ignite-jta, CVE-2020-10650) + (reported by Srikanth Ramu, threedr3am'follower) + #2659: Block one more gadget type (aries.transaction.jms, CVE-2020-10672) + (reported by Srikanth Ramu) + #2660: Block one more gadget type (caucho-quercus, CVE-2020-10673) + (reported by threedr3am'follower) + #2662: Block one more gadget type (bus-proxy, CVE-2020-10968) + (reported by XuYuanzhen) + #2664: Block one more gadget type (activemq-pool[-jms], CVE-2020-11111) + (reported by Srikanth Ramu) + #2666: Block one more gadget type (apache/commons-proxy, CVE-2020-11112) + (reported by Yiting Fan) + #2670: Block one more gadget type (openjpa, CVE-2020-11113) + (reported by XuYuanzhen) + #2680: Block one more gadget type (SSRF, 
spring-aop, CVE-2020-11619) + #2682: Block one more gadget type (commons-jelly, CVE-2020-11620) + + 2.9.10.3 (23-Feb-2020) + + #2620: Block one more gadget type (xbean-reflect/JNDI - CVE-2020-8840) + (reported by threedr3am@github) + + 2.9.10.2 (03-Jan-2020) + + #2526: Block two more gadget types (ehcache/JNDI - CVE-2019-20330) + (repoerted by UltramanGaia) + #2544: java.lang.NoClassDefFoundError Thrown for compact profile1 + (reported by Jon A) + + 2.9.10.1 (20-Oct-2019) + + #2478: Block two more gadget types (commons-dbcp, p6spy, + CVE-2019-16942 / CVE-2019-16943) + (reported by b5mali4 / root@codersec.net) + #2498: Block one more gadget type (log4j-extras/1.2, CVE-2019-17531) +2:181c +3:181c + [2.9.10.x micro-patches omitted] diff --git a/src/python/merge_conflict_analysis_diffs/942/git_hires_merge/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/git_hires_merge/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..a9564cff37 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/git_hires_merge/diff_VERSION-2.x.txt @@ -0,0 +1,8 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +31a37 +> <<<<<<< HEAD:release-notes/VERSION-2.x diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..d1092d41bb --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort/diff_VERSION-2.x.txt @@ -0,0 +1,16 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +345,353d349 +< ||||||| 862fca923:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +< #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_adjacent/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_adjacent/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..a9564cff37 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_adjacent/diff_VERSION-2.x.txt @@ -0,0 +1,8 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +31a37 +> <<<<<<< HEAD:release-notes/VERSION-2.x diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..d1092d41bb --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,16 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) 
+> +345,353d349 +< ||||||| 862fca923:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +< #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_imports/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_imports/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..a9564cff37 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_imports/diff_VERSION-2.x.txt @@ -0,0 +1,8 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +31a37 +> <<<<<<< HEAD:release-notes/VERSION-2.x diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..a9564cff37 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,8 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +31a37 +> <<<<<<< HEAD:release-notes/VERSION-2.x diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_histogram/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_histogram/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..d1092d41bb --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_histogram/diff_VERSION-2.x.txt @@ -0,0 +1,16 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +345,353d349 +< ||||||| 862fca923:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +< #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..d1092d41bb --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,16 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) 
+> +345,353d349 +< ||||||| 862fca923:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +< #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_minimal/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_minimal/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..d1092d41bb --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_minimal/diff_VERSION-2.x.txt @@ -0,0 +1,16 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +345,353d349 +< ||||||| 862fca923:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +< #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_myers/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_myers/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..d1092d41bb --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_myers/diff_VERSION-2.x.txt @@ -0,0 +1,16 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +345,353d349 +< ||||||| 862fca923:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +< #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_patience/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_patience/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..d1092d41bb --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/gitmerge_recursive_patience/diff_VERSION-2.x.txt @@ -0,0 +1,16 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +345,353d349 +< ||||||| 862fca923:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +< #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) 
+< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/942/intellimerge/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/intellimerge/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..d1092d41bb --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/intellimerge/diff_VERSION-2.x.txt @@ -0,0 +1,16 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +345,353d349 +< ||||||| 862fca923:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +< #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/942/spork/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/942/spork/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..a9564cff37 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/942/spork/diff_VERSION-2.x.txt @@ -0,0 +1,8 @@ +6a7,11 +> 2.9.9.2 (not yet released) +> +> #2387: Block yet another deserialization gadget (EHCache, CVE-2019-xxxxx?) +> #2389: Block yet another deserialization gadget (Logback, CVE-2019-xxxxx?) +> +31a37 +> <<<<<<< HEAD:release-notes/VERSION-2.x diff --git a/src/python/merge_conflict_analysis_diffs/943/git_hires_merge/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/git_hires_merge/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..8ca33c8643 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/git_hires_merge/diff_SubTypeValidator.java.txt @@ -0,0 +1,51 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86c + // [databind#2334] (2.9.9.1) +3:86c + // [databind#2334]: logback-core (2.9.9.1) +====3 +1:90a +2:88a +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:103,104c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:146,147c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/git_hires_merge/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/git_hires_merge/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..1931fae61d --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/943/git_hires_merge/diff_VERSION-2.x.txt @@ -0,0 +1,2 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..4d1ebecad3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort/diff_SubTypeValidator.java.txt @@ -0,0 +1,61 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86,92c + <<<<<<< HEAD + // [databind#2334] (2.9.9.1) + ||||||| 4c935660a + // [databind#2334] + ======= + // [databind#2334]: logback-core + >>>>>>> TEMP_RIGHT_BRANCH +3:86c + // [databind#2334]: logback-core (2.9.9.1) +==== +1:90a +2:95,98c + // [databind#2341]: jdom/jdom2 + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:113,114c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:156,157c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..dcdf470ac9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort/diff_VERSION-2.x.txt @@ -0,0 +1,11 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) +28d28 +< <<<<<<< HEAD:release-notes/VERSION-2.x +341,346d340 +< ||||||| 4c935660a:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_adjacent/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_adjacent/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..8ca33c8643 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_adjacent/diff_SubTypeValidator.java.txt @@ -0,0 +1,51 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c 
+ // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86c + // [databind#2334] (2.9.9.1) +3:86c + // [databind#2334]: logback-core (2.9.9.1) +====3 +1:90a +2:88a +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:103,104c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:146,147c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_adjacent/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_adjacent/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..1931fae61d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_adjacent/diff_VERSION-2.x.txt @@ -0,0 +1,2 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_ignorespace/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_ignorespace/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..4d1ebecad3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_ignorespace/diff_SubTypeValidator.java.txt @@ -0,0 +1,61 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86,92c + <<<<<<< HEAD + // [databind#2334] (2.9.9.1) + ||||||| 4c935660a + // [databind#2334] + ======= + // [databind#2334]: logback-core + >>>>>>> TEMP_RIGHT_BRANCH +3:86c + // [databind#2334]: logback-core (2.9.9.1) +==== +1:90a +2:95,98c + // [databind#2341]: jdom/jdom2 + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:113,114c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:156,157c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt new file mode 
100644 index 0000000000..dcdf470ac9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,11 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) +28d28 +< <<<<<<< HEAD:release-notes/VERSION-2.x +341,346d340 +< ||||||| 4c935660a:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..8ca33c8643 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports/diff_SubTypeValidator.java.txt @@ -0,0 +1,51 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86c + // [databind#2334] (2.9.9.1) +3:86c + // [databind#2334]: logback-core (2.9.9.1) +====3 +1:90a +2:88a +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:103,104c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:146,147c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..1931fae61d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports/diff_VERSION-2.x.txt @@ -0,0 +1,2 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports_ignorespace/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports_ignorespace/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..8ca33c8643 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports_ignorespace/diff_SubTypeValidator.java.txt @@ -0,0 +1,51 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86c + 
// [databind#2334] (2.9.9.1) +3:86c + // [databind#2334]: logback-core (2.9.9.1) +====3 +1:90a +2:88a +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:103,104c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:146,147c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..1931fae61d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_ort_imports_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,2 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_histogram/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_histogram/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..4d1ebecad3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_histogram/diff_SubTypeValidator.java.txt @@ -0,0 +1,61 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86,92c + <<<<<<< HEAD + // [databind#2334] (2.9.9.1) + ||||||| 4c935660a + // [databind#2334] + ======= + // [databind#2334]: logback-core + >>>>>>> TEMP_RIGHT_BRANCH +3:86c + // [databind#2334]: logback-core (2.9.9.1) +==== +1:90a +2:95,98c + // [databind#2341]: jdom/jdom2 + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:113,114c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:156,157c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_histogram/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_histogram/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..dcdf470ac9 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_histogram/diff_VERSION-2.x.txt @@ -0,0 +1,11 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) +28d28 +< <<<<<<< HEAD:release-notes/VERSION-2.x +341,346d340 +< ||||||| 4c935660a:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_ignorespace/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_ignorespace/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..4d1ebecad3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_ignorespace/diff_SubTypeValidator.java.txt @@ -0,0 +1,61 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86,92c + <<<<<<< HEAD + // [databind#2334] (2.9.9.1) + ||||||| 4c935660a + // [databind#2334] + ======= + // [databind#2334]: logback-core + >>>>>>> TEMP_RIGHT_BRANCH +3:86c + // [databind#2334]: logback-core (2.9.9.1) +==== +1:90a +2:95,98c + // [databind#2341]: jdom/jdom2 + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:113,114c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:156,157c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..dcdf470ac9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_ignorespace/diff_VERSION-2.x.txt @@ -0,0 +1,11 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) +28d28 +< <<<<<<< HEAD:release-notes/VERSION-2.x +341,346d340 +< ||||||| 4c935660a:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_minimal/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_minimal/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..4d1ebecad3 --- /dev/null +++ 
b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_minimal/diff_SubTypeValidator.java.txt @@ -0,0 +1,61 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86,92c + <<<<<<< HEAD + // [databind#2334] (2.9.9.1) + ||||||| 4c935660a + // [databind#2334] + ======= + // [databind#2334]: logback-core + >>>>>>> TEMP_RIGHT_BRANCH +3:86c + // [databind#2334]: logback-core (2.9.9.1) +==== +1:90a +2:95,98c + // [databind#2341]: jdom/jdom2 + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:113,114c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:156,157c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_minimal/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_minimal/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..dcdf470ac9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_minimal/diff_VERSION-2.x.txt @@ -0,0 +1,11 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) +28d28 +< <<<<<<< HEAD:release-notes/VERSION-2.x +341,346d340 +< ||||||| 4c935660a:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_myers/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_myers/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..4d1ebecad3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_myers/diff_SubTypeValidator.java.txt @@ -0,0 +1,61 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86,92c + <<<<<<< HEAD + // [databind#2334] (2.9.9.1) + ||||||| 4c935660a + // [databind#2334] + ======= + // [databind#2334]: logback-core + >>>>>>> TEMP_RIGHT_BRANCH +3:86c + // [databind#2334]: logback-core (2.9.9.1) +==== +1:90a +2:95,98c + 
// [databind#2341]: jdom/jdom2 + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:113,114c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:156,157c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_myers/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_myers/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..dcdf470ac9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_myers/diff_VERSION-2.x.txt @@ -0,0 +1,11 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) +28d28 +< <<<<<<< HEAD:release-notes/VERSION-2.x +341,346d340 +< ||||||| 4c935660a:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_patience/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_patience/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..4d1ebecad3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_patience/diff_SubTypeValidator.java.txt @@ -0,0 +1,61 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86,92c + <<<<<<< HEAD + // [databind#2334] (2.9.9.1) + ||||||| 4c935660a + // [databind#2334] + ======= + // [databind#2334]: logback-core + >>>>>>> TEMP_RIGHT_BRANCH +3:86c + // [databind#2334]: logback-core (2.9.9.1) +==== +1:90a +2:95,98c + // [databind#2341]: jdom/jdom2 + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:113,114c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:156,157c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); 
diff --git a/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_patience/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_patience/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..dcdf470ac9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/gitmerge_recursive_patience/diff_VERSION-2.x.txt @@ -0,0 +1,11 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) +28d28 +< <<<<<<< HEAD:release-notes/VERSION-2.x +341,346d340 +< ||||||| 4c935660a:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/943/intellimerge/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/intellimerge/diff_SubTypeValidator.java.txt new file mode 100644 index 0000000000..4d1ebecad3 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/intellimerge/diff_SubTypeValidator.java.txt @@ -0,0 +1,61 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86,92c + <<<<<<< HEAD + // [databind#2334] (2.9.9.1) + ||||||| 4c935660a + // [databind#2334] + ======= + // [databind#2334]: logback-core + >>>>>>> TEMP_RIGHT_BRANCH +3:86c + // [databind#2334]: logback-core (2.9.9.1) +==== +1:90a +2:95,98c + // [databind#2341]: jdom/jdom2 + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:113,114c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:156,157c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/intellimerge/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/intellimerge/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..dcdf470ac9 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/intellimerge/diff_VERSION-2.x.txt @@ -0,0 +1,11 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814) +28d28 +< <<<<<<< HEAD:release-notes/VERSION-2.x +341,346d340 +< ||||||| 4c935660a:release-notes/VERSION +< #2334: Block class for CVE-2019-12384 +< ======= +< #2334: Block class for CVE-2019-12384 +< #2341: Block class for CVE-2019-12814 +< >>>>>>> TEMP_RIGHT_BRANCH:release-notes/VERSION diff --git a/src/python/merge_conflict_analysis_diffs/943/spork/diff_SubTypeValidator.java.txt b/src/python/merge_conflict_analysis_diffs/943/spork/diff_SubTypeValidator.java.txt new file mode 
100644 index 0000000000..8ca33c8643 --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/spork/diff_SubTypeValidator.java.txt @@ -0,0 +1,51 @@ +====1 +1:6a +2:7c +3:7c + import com.fasterxml.jackson.databind.BeanDescription; +====1 +1:69,71c + // [databind#1899]: more 3rd party + s.add("org.hibernate.jmx.StatisticsService"); + s.add("org.apache.ibatis.datasource.jndi.JndiDataSourceFactory"); +2:69a +3:69a +====1 +1:85,86c + // [databind#2326] + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +2:83,84c +3:83,84c + // [databind#2326] (2.9.9) + s.add("com.mysql.cj.jdbc.admin.MiniAdmin"); +==== +1:88c + // [databind#2334] +2:86c + // [databind#2334] (2.9.9.1) +3:86c + // [databind#2334]: logback-core (2.9.9.1) +====3 +1:90a +2:88a +3:89,92c + // [databind#2341]: jdom/jdom2 (2.9.9.1) + s.add("org.jdom.transform.XSLTransformer"); + s.add("org.jdom2.transform.XSLTransformer"); + +====1 +1:105,106c + public void validateSubType(DeserializationContext ctxt, JavaType type) + throws JsonMappingException +2:103,104c +3:107,108c + public void validateSubType(DeserializationContext ctxt, JavaType type, + BeanDescription beanDesc) throws JsonMappingException +====1 +1:148,149c + throw JsonMappingException.from(ctxt, + String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); +2:146,147c +3:150,151c + ctxt.reportBadTypeDefinition(beanDesc, + "Illegal type (%s) to deserialize: prevented for security reasons", full); diff --git a/src/python/merge_conflict_analysis_diffs/943/spork/diff_VERSION-2.x.txt b/src/python/merge_conflict_analysis_diffs/943/spork/diff_VERSION-2.x.txt new file mode 100644 index 0000000000..1931fae61d --- /dev/null +++ b/src/python/merge_conflict_analysis_diffs/943/spork/diff_VERSION-2.x.txt @@ -0,0 +1,2 @@ +9a10 +> #2341: Block one more gadget type (CVE-2019-12814)
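
Note (illustration only): the generated diff_*.txt artifacts added above use classic diff/diff3 output — "====", "====1", "====3" group headers with "1:", "2:", "3:" line ranges, plus "<" / ">" change lines — and several of them still contain raw git conflict markers carried over from the merged files. The short Python sketch below is not part of diff3_analysis.py; it only shows one possible way to tally those artifacts. The directory layout merge_conflict_analysis_diffs/<row>/<tool>/diff_<file>.txt is taken from the paths in this patch, while the regexes and walking logic are assumptions made for this example.

    # Rough sketch, assuming the merge_conflict_analysis_diffs/<row>/<tool>/ layout
    # visible in the paths above; not the repository's own analysis code.
    import os
    import re
    from collections import defaultdict

    # diff3 ed-script group headers such as "====", "====1", "====3"
    GROUP_RE = re.compile(r"^====\d?$")
    # distinctive leftover git conflict markers inside the diffed file content
    CONFLICT_MARKERS = ("<<<<<<<", "|||||||", ">>>>>>>")

    def summarize(root="./merge_conflict_analysis_diffs"):
        """Count diff3 groups and residual conflict-marker lines per (row, merge tool)."""
        counts = defaultdict(lambda: {"groups": 0, "conflict_markers": 0})
        for dirpath, _dirs, files in os.walk(root):
            for name in files:
                if not name.startswith("diff_"):
                    continue
                parts = dirpath.rstrip(os.sep).split(os.sep)
                if len(parts) < 2:
                    continue
                row, tool = parts[-2], parts[-1]
                path = os.path.join(dirpath, name)
                with open(path, encoding="utf-8", errors="replace") as fh:
                    for line in fh:
                        if GROUP_RE.match(line.strip()):
                            counts[(row, tool)]["groups"] += 1
                        elif any(marker in line for marker in CONFLICT_MARKERS):
                            counts[(row, tool)]["conflict_markers"] += 1
        return dict(counts)

    if __name__ == "__main__":
        for (row, tool), stats in sorted(summarize().items()):
            print(f"{row}/{tool}: {stats['groups']} diff3 groups, "
                  f"{stats['conflict_markers']} leftover conflict-marker lines")

Run from src/python (where the artifacts are written by the notebook), this prints one summary line per merge attempt and tool, e.g. "943/spork: 7 diff3 groups, 0 leftover conflict-marker lines"; the exact counts shown here are illustrative, not measured.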